diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json new file mode 100644 index 0000000000000..01c6bdcd5fe3d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.7.2", + "use": [ + "@autorest/python@5.16.0", + "@autorest/modelerfour@4.19.3" + ], + "commit": "9f7c56cbed66f1a1589b855b18075c2de726dd02", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/machinelearningservices/resource-manager/readme.md --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --python3-only --use=@autorest/python@5.16.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2", + "readme": "specification/machinelearningservices/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py index 67feb9a70351f..bbe4333504ff4 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/__init__.py @@ -10,10 +10,14 @@ from ._version import VERSION __version__ = VERSION -__all__ = ['AzureMachineLearningWorkspaces'] try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import except ImportError: - pass + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk +__all__ = ['AzureMachineLearningWorkspaces'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py index 527b9e2bb787c..3a24d62577115 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py @@ -6,106 +6,235 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from copy import deepcopy +from typing import Any, TYPE_CHECKING -from azure.mgmt.core import ARMPipelineClient from msrest import Deserializer, Serializer -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - - from azure.core.credentials import TokenCredential +from azure.core.rest import HttpRequest, HttpResponse +from azure.mgmt.core import ARMPipelineClient -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import Operations -from .operations import WorkspacesOperations -from .operations import WorkspaceFeaturesOperations -from .operations import NotebooksOperations -from .operations import UsagesOperations -from .operations import VirtualMachineSizesOperations -from .operations import QuotasOperations -from .operations import WorkspaceConnectionsOperations -from .operations import MachineLearningComputeOperations -from .operations import AzureMachineLearningWorkspacesOperationsMixin -from .operations import PrivateEndpointConnectionsOperations -from .operations import PrivateLinkResourcesOperations from . import models +from ._configuration import AzureMachineLearningWorkspacesConfiguration +from .operations import BatchDeploymentsOperations, BatchEndpointsOperations, CodeContainersOperations, CodeVersionsOperations, ComponentContainersOperations, ComponentVersionsOperations, ComputeOperations, DataContainersOperations, DataVersionsOperations, DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, JobsOperations, LabelingJobsOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, OnlineEndpointsOperations, Operations, PrivateEndpointConnectionsOperations, PrivateLinkResourcesOperations, QuotasOperations, SchedulesOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, WorkspaceFeaturesOperations, WorkspacesOperations +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials import TokenCredential -class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): +class AzureMachineLearningWorkspaces: # pylint: disable=too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
:ivar operations: Operations operations :vartype operations: azure.mgmt.machinelearningservices.operations.Operations :ivar workspaces: WorkspacesOperations operations :vartype workspaces: azure.mgmt.machinelearningservices.operations.WorkspacesOperations - :ivar workspace_features: WorkspaceFeaturesOperations operations - :vartype workspace_features: azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations - :ivar notebooks: NotebooksOperations operations - :vartype notebooks: azure.mgmt.machinelearningservices.operations.NotebooksOperations :ivar usages: UsagesOperations operations :vartype usages: azure.mgmt.machinelearningservices.operations.UsagesOperations :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations - :vartype virtual_machine_sizes: azure.mgmt.machinelearningservices.operations.VirtualMachineSizesOperations + :vartype virtual_machine_sizes: + azure.mgmt.machinelearningservices.operations.VirtualMachineSizesOperations :ivar quotas: QuotasOperations operations :vartype quotas: azure.mgmt.machinelearningservices.operations.QuotasOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations - :ivar machine_learning_compute: MachineLearningComputeOperations operations - :vartype machine_learning_compute: azure.mgmt.machinelearningservices.operations.MachineLearningComputeOperations + :ivar compute: ComputeOperations operations + :vartype compute: azure.mgmt.machinelearningservices.operations.ComputeOperations :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations + :ivar workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations + :ivar batch_endpoints: BatchEndpointsOperations operations + :vartype batch_endpoints: + azure.mgmt.machinelearningservices.operations.BatchEndpointsOperations + :ivar batch_deployments: BatchDeploymentsOperations operations + :vartype batch_deployments: + azure.mgmt.machinelearningservices.operations.BatchDeploymentsOperations + :ivar code_containers: CodeContainersOperations operations + :vartype code_containers: + azure.mgmt.machinelearningservices.operations.CodeContainersOperations + :ivar code_versions: CodeVersionsOperations operations + :vartype code_versions: azure.mgmt.machinelearningservices.operations.CodeVersionsOperations + :ivar component_containers: ComponentContainersOperations operations + :vartype component_containers: + azure.mgmt.machinelearningservices.operations.ComponentContainersOperations + :ivar component_versions: ComponentVersionsOperations operations + :vartype component_versions: + azure.mgmt.machinelearningservices.operations.ComponentVersionsOperations + :ivar data_containers: DataContainersOperations operations + :vartype data_containers: + 
azure.mgmt.machinelearningservices.operations.DataContainersOperations + :ivar data_versions: DataVersionsOperations operations + :vartype data_versions: azure.mgmt.machinelearningservices.operations.DataVersionsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.machinelearningservices.operations.DatastoresOperations + :ivar environment_containers: EnvironmentContainersOperations operations + :vartype environment_containers: + azure.mgmt.machinelearningservices.operations.EnvironmentContainersOperations + :ivar environment_versions: EnvironmentVersionsOperations operations + :vartype environment_versions: + azure.mgmt.machinelearningservices.operations.EnvironmentVersionsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: azure.mgmt.machinelearningservices.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: azure.mgmt.machinelearningservices.operations.LabelingJobsOperations + :ivar model_containers: ModelContainersOperations operations + :vartype model_containers: + azure.mgmt.machinelearningservices.operations.ModelContainersOperations + :ivar model_versions: ModelVersionsOperations operations + :vartype model_versions: azure.mgmt.machinelearningservices.operations.ModelVersionsOperations + :ivar online_endpoints: OnlineEndpointsOperations operations + :vartype online_endpoints: + azure.mgmt.machinelearningservices.operations.OnlineEndpointsOperations + :ivar online_deployments: OnlineDeploymentsOperations operations + :vartype online_deployments: + azure.mgmt.machinelearningservices.operations.OnlineDeploymentsOperations + :ivar schedules: SchedulesOperations operations + :vartype schedules: azure.mgmt.machinelearningservices.operations.SchedulesOperations + :ivar workspace_features: WorkspaceFeaturesOperations operations + :vartype workspace_features: + azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-06-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - + self._serialize.client_side_validation = False self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.workspaces = WorkspacesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_features = WorkspaceFeaturesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.notebooks = NotebooksOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.usages = UsagesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.virtual_machine_sizes = VirtualMachineSizesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.quotas = QuotasOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_connections = WorkspaceConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.machine_learning_compute = MachineLearningComputeOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) + self.compute = ComputeOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_endpoints = BatchEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_containers = CodeContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_versions = CodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_containers 
= DataContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_versions = DataVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.datastores = DatastoresOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.labeling_jobs = LabelingJobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_versions = ModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.schedules = SchedulesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) def close(self): # type: () -> None diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py index 79ded5841781c..d896f2a743cc8 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py @@ -6,22 +6,20 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
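For orientation, a minimal usage sketch of the regenerated synchronous client above, assuming azure-identity is installed; the subscription ID, relative URL, and api-version value are illustrative placeholders, and _send_request is the private pipeline helper documented in the preceding docstring.

from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

subscription_id = "00000000-0000-0000-0000-000000000000"  # placeholder

# base_url and api_version are now keyword overrides with documented defaults.
client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id=subscription_id,
)

# Operation groups hang off the client as attributes (client.compute,
# client.batch_deployments, client.schedules, ...), matching the assignments above.

# _send_request pushes an arbitrary request through the client's policy chain;
# relative URLs are formatted against base_url and no error handling is applied.
request = HttpRequest(
    "GET",
    "/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces"
    "?api-version=2022-06-01-preview".format(subscription_id),
)
response = client._send_request(request)  # pylint: disable=protected-access
print(response.status_code)

client.close()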
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import TokenCredential -class AzureMachineLearningWorkspacesConfiguration(Configuration): +class AzureMachineLearningWorkspacesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for AzureMachineLearningWorkspaces. Note that all parameters used to create this instance are saved as instance @@ -29,26 +27,30 @@ class AzureMachineLearningWorkspacesConfiguration(Configuration): :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-06-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + credential: "TokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-06-01-preview") # type: str + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.api_version = "2020-08-01" + self.api_version = api_version self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION)) self._configure(**kwargs) @@ -68,4 +70,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py new file mode 100644 index 0000000000000..0ad201a8c586e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py new file mode 100644 index 0000000000000..138f663c53a4e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_vendor.py @@ -0,0 +1,27 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [ + c for c in formatted_components if "{}".format(key.args[0]) not in c + ] + template = "/".join(components) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py index c47f66669f1bf..e5754a47ce68f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
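To illustrate the customization hook wired up in the generated __init__.py, a hypothetical _patch.py along the lines the package expects; the workspace_client_for helper and its default are made up for this sketch and are not part of the generated surface.

from typing import List

from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces


def workspace_client_for(credential, subscription_id: str, **kwargs) -> AzureMachineLearningWorkspaces:
    """Hypothetical convenience factory applying house defaults before building the client."""
    kwargs.setdefault("polling_interval", 5)  # documented keyword on the client
    return AzureMachineLearningWorkspaces(credential, subscription_id, **kwargs)


# Anything listed here is re-exported by the generated __init__.py via
# `from ._patch import __all__ as _patch_all`.
__all__: List[str] = ["workspace_client_for"]


def patch_sdk():
    """Kept as the last-resort monkey-patch hook; intentionally a no-op here."""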
# -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "1.0.0b1" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py index 872474577c4fa..6aed0faa898de 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/__init__.py @@ -7,4 +7,14 @@ # -------------------------------------------------------------------------- from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk __all__ = ['AzureMachineLearningWorkspaces'] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py index 77636808b2d60..74d3fcaf6301f 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_azure_machine_learning_workspaces.py @@ -6,103 +6,239 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Optional, TYPE_CHECKING +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING -from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import Operations -from .operations import WorkspacesOperations -from .operations import WorkspaceFeaturesOperations -from .operations import NotebooksOperations -from .operations import UsagesOperations -from .operations import VirtualMachineSizesOperations -from .operations import QuotasOperations -from .operations import WorkspaceConnectionsOperations -from .operations import MachineLearningComputeOperations -from .operations import AzureMachineLearningWorkspacesOperationsMixin -from .operations import PrivateEndpointConnectionsOperations -from .operations import PrivateLinkResourcesOperations from .. 
import models +from ._configuration import AzureMachineLearningWorkspacesConfiguration +from .operations import BatchDeploymentsOperations, BatchEndpointsOperations, CodeContainersOperations, CodeVersionsOperations, ComponentContainersOperations, ComponentVersionsOperations, ComputeOperations, DataContainersOperations, DataVersionsOperations, DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, JobsOperations, LabelingJobsOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, OnlineEndpointsOperations, Operations, PrivateEndpointConnectionsOperations, PrivateLinkResourcesOperations, QuotasOperations, SchedulesOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, WorkspaceFeaturesOperations, WorkspacesOperations +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential -class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): +class AzureMachineLearningWorkspaces: # pylint: disable=too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. :ivar operations: Operations operations :vartype operations: azure.mgmt.machinelearningservices.aio.operations.Operations :ivar workspaces: WorkspacesOperations operations :vartype workspaces: azure.mgmt.machinelearningservices.aio.operations.WorkspacesOperations - :ivar workspace_features: WorkspaceFeaturesOperations operations - :vartype workspace_features: azure.mgmt.machinelearningservices.aio.operations.WorkspaceFeaturesOperations - :ivar notebooks: NotebooksOperations operations - :vartype notebooks: azure.mgmt.machinelearningservices.aio.operations.NotebooksOperations :ivar usages: UsagesOperations operations :vartype usages: azure.mgmt.machinelearningservices.aio.operations.UsagesOperations :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations - :vartype virtual_machine_sizes: azure.mgmt.machinelearningservices.aio.operations.VirtualMachineSizesOperations + :vartype virtual_machine_sizes: + azure.mgmt.machinelearningservices.aio.operations.VirtualMachineSizesOperations :ivar quotas: QuotasOperations operations :vartype quotas: azure.mgmt.machinelearningservices.aio.operations.QuotasOperations - :ivar workspace_connections: WorkspaceConnectionsOperations operations - :vartype workspace_connections: azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations - :ivar machine_learning_compute: MachineLearningComputeOperations operations - :vartype machine_learning_compute: azure.mgmt.machinelearningservices.aio.operations.MachineLearningComputeOperations + :ivar compute: ComputeOperations operations + :vartype compute: azure.mgmt.machinelearningservices.aio.operations.ComputeOperations :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations + :vartype private_endpoint_connections: + azure.mgmt.machinelearningservices.aio.operations.PrivateEndpointConnectionsOperations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + azure.mgmt.machinelearningservices.aio.operations.PrivateLinkResourcesOperations + :ivar 
workspace_connections: WorkspaceConnectionsOperations operations + :vartype workspace_connections: + azure.mgmt.machinelearningservices.aio.operations.WorkspaceConnectionsOperations + :ivar batch_endpoints: BatchEndpointsOperations operations + :vartype batch_endpoints: + azure.mgmt.machinelearningservices.aio.operations.BatchEndpointsOperations + :ivar batch_deployments: BatchDeploymentsOperations operations + :vartype batch_deployments: + azure.mgmt.machinelearningservices.aio.operations.BatchDeploymentsOperations + :ivar code_containers: CodeContainersOperations operations + :vartype code_containers: + azure.mgmt.machinelearningservices.aio.operations.CodeContainersOperations + :ivar code_versions: CodeVersionsOperations operations + :vartype code_versions: + azure.mgmt.machinelearningservices.aio.operations.CodeVersionsOperations + :ivar component_containers: ComponentContainersOperations operations + :vartype component_containers: + azure.mgmt.machinelearningservices.aio.operations.ComponentContainersOperations + :ivar component_versions: ComponentVersionsOperations operations + :vartype component_versions: + azure.mgmt.machinelearningservices.aio.operations.ComponentVersionsOperations + :ivar data_containers: DataContainersOperations operations + :vartype data_containers: + azure.mgmt.machinelearningservices.aio.operations.DataContainersOperations + :ivar data_versions: DataVersionsOperations operations + :vartype data_versions: + azure.mgmt.machinelearningservices.aio.operations.DataVersionsOperations + :ivar datastores: DatastoresOperations operations + :vartype datastores: azure.mgmt.machinelearningservices.aio.operations.DatastoresOperations + :ivar environment_containers: EnvironmentContainersOperations operations + :vartype environment_containers: + azure.mgmt.machinelearningservices.aio.operations.EnvironmentContainersOperations + :ivar environment_versions: EnvironmentVersionsOperations operations + :vartype environment_versions: + azure.mgmt.machinelearningservices.aio.operations.EnvironmentVersionsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: azure.mgmt.machinelearningservices.aio.operations.JobsOperations + :ivar labeling_jobs: LabelingJobsOperations operations + :vartype labeling_jobs: + azure.mgmt.machinelearningservices.aio.operations.LabelingJobsOperations + :ivar model_containers: ModelContainersOperations operations + :vartype model_containers: + azure.mgmt.machinelearningservices.aio.operations.ModelContainersOperations + :ivar model_versions: ModelVersionsOperations operations + :vartype model_versions: + azure.mgmt.machinelearningservices.aio.operations.ModelVersionsOperations + :ivar online_endpoints: OnlineEndpointsOperations operations + :vartype online_endpoints: + azure.mgmt.machinelearningservices.aio.operations.OnlineEndpointsOperations + :ivar online_deployments: OnlineDeploymentsOperations operations + :vartype online_deployments: + azure.mgmt.machinelearningservices.aio.operations.OnlineDeploymentsOperations + :ivar schedules: SchedulesOperations operations + :vartype schedules: azure.mgmt.machinelearningservices.aio.operations.SchedulesOperations + :ivar workspace_features: WorkspaceFeaturesOperations operations + :vartype workspace_features: + azure.mgmt.machinelearningservices.aio.operations.WorkspaceFeaturesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: Azure subscription identifier. 
+ :param subscription_id: The ID of the target subscription. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-06-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, - base_url: Optional[str] = None, + base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) + self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - + self._serialize.client_side_validation = False self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.workspaces = WorkspacesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_features = WorkspaceFeaturesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.notebooks = NotebooksOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.usages = UsagesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.virtual_machine_sizes = VirtualMachineSizesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.quotas = QuotasOperations( - self._client, self._config, self._serialize, self._deserialize) - self.workspace_connections = WorkspaceConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.machine_learning_compute = MachineLearningComputeOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) + self.compute = ComputeOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_endpoints = 
BatchEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_containers = CodeContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.code_versions = CodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_containers = DataContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.data_versions = DataVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.datastores = DatastoresOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.labeling_jobs = LabelingJobsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.model_versions = ModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.schedules = SchedulesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + + def _send_request( + self, + request: HttpRequest, + **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py index a11cb071c3265..5dc2064cf63f0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_configuration.py @@ -10,7 +10,7 @@ from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION @@ -19,7 +19,7 @@ from azure.core.credentials_async import AsyncTokenCredential -class AzureMachineLearningWorkspacesConfiguration(Configuration): +class AzureMachineLearningWorkspacesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for AzureMachineLearningWorkspaces. Note that all parameters used to create this instance are saved as instance @@ -27,8 +27,11 @@ class AzureMachineLearningWorkspacesConfiguration(Configuration): :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-06-01-preview". Note that overriding + this default value may result in unsupported behavior. 
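A matching asynchronous sketch for the aio client above, assuming azure-identity is installed; the subscription ID is a placeholder and list_by_subscription is assumed to be the conventional ARM listing method on WorkspacesOperations.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def main() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningWorkspaces(
        credential=credential,
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )
    try:
        # List operations return async pagers consumed with `async for`;
        # list_by_subscription is assumed here as the usual ARM method name.
        async for workspace in client.workspaces.list_by_subscription():
            print(workspace.name)
    finally:
        await client.close()      # releases the async transport
        await credential.close()


asyncio.run(main())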
+ :paramtype api_version: str """ def __init__( @@ -37,15 +40,17 @@ def __init__( subscription_id: str, **kwargs: Any ) -> None: + super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop('api_version', "2022-06-01-preview") # type: str + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.api_version = "2020-08-01" + self.api_version = api_version self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION)) self._configure(**kwargs) @@ -64,4 +69,4 @@ def _configure( self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py new file mode 100644 index 0000000000000..0ad201a8c586e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py index 516999b100d82..a02156aa11a5b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/__init__.py @@ -8,28 +8,65 @@ from ._operations import Operations from ._workspaces_operations import WorkspacesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._notebooks_operations import NotebooksOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations -from ._machine_learning_compute_operations import MachineLearningComputeOperations -from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin +from ._compute_operations import ComputeOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations +from ._batch_deployments_operations import BatchDeploymentsOperations +from ._code_containers_operations import CodeContainersOperations +from ._code_versions_operations import CodeVersionsOperations +from ._component_containers_operations import ComponentContainersOperations +from ._component_versions_operations import ComponentVersionsOperations +from ._data_containers_operations import DataContainersOperations +from ._data_versions_operations import DataVersionsOperations +from ._datastores_operations import DatastoresOperations +from ._environment_containers_operations import EnvironmentContainersOperations +from ._environment_versions_operations import EnvironmentVersionsOperations +from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations +from ._model_containers_operations import ModelContainersOperations +from ._model_versions_operations import ModelVersionsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations +from ._online_deployments_operations import OnlineDeploymentsOperations +from ._schedules_operations import SchedulesOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ 'Operations', 'WorkspacesOperations', - 'WorkspaceFeaturesOperations', - 'NotebooksOperations', 'UsagesOperations', 'VirtualMachineSizesOperations', 'QuotasOperations', - 'WorkspaceConnectionsOperations', - 'MachineLearningComputeOperations', - 'AzureMachineLearningWorkspacesOperationsMixin', + 'ComputeOperations', 'PrivateEndpointConnectionsOperations', 
'PrivateLinkResourcesOperations', + 'WorkspaceConnectionsOperations', + 'BatchEndpointsOperations', + 'BatchDeploymentsOperations', + 'CodeContainersOperations', + 'CodeVersionsOperations', + 'ComponentContainersOperations', + 'ComponentVersionsOperations', + 'DataContainersOperations', + 'DataVersionsOperations', + 'DatastoresOperations', + 'EnvironmentContainersOperations', + 'EnvironmentVersionsOperations', + 'JobsOperations', + 'LabelingJobsOperations', + 'ModelContainersOperations', + 'ModelVersionsOperations', + 'OnlineEndpointsOperations', + 'OnlineDeploymentsOperations', + 'SchedulesOperations', + 'WorkspaceFeaturesOperations', ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py deleted file mode 100644 index dd37ffc498c38..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_azure_machine_learning_workspaces_operations.py +++ /dev/null @@ -1,89 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.mgmt.core.exceptions import ARMErrorFormat - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class AzureMachineLearningWorkspacesOperationsMixin: - - def list_skus( - self, - **kwargs - ) -> AsyncIterable["_models.SkuListResult"]: - """Lists all skus with associated features. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuListResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuListResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('SkuListResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py new file mode 100644 index 0000000000000..50e6a9d8a1a5c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_deployments_operations.py @@ -0,0 +1,699 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
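Both the deleted list_skus mixin above and the new list operations below build their pagers the same way: two awaitable callables handed to AsyncItemPaged. A self-contained sketch of that contract, using made-up in-memory pages instead of HTTP calls.

import asyncio

from azure.core.async_paging import AsyncItemPaged, AsyncList

# Fake paged payloads keyed by continuation token (None means "first page").
_PAGES = {
    None: {"value": ["sku-a", "sku-b"], "nextLink": "page-2"},
    "page-2": {"value": ["sku-c"], "nextLink": None},
}


async def get_next(next_link=None):
    # Real implementations issue an HTTP request here; the demo reads a dict.
    return _PAGES[next_link]


async def extract_data(page):
    # Return (continuation_token, async-iterable of items); a falsy token ends iteration.
    return page["nextLink"], AsyncList(page["value"])


async def main() -> None:
    async for item in AsyncItemPaged(get_next, extract_data):
        print(item)  # sku-a, sku-b, sku-c


asyncio.run(main())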
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._batch_deployments_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BatchDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`batch_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.BatchDeploymentTrackedResourceArmPaginatedResult]: + """Lists Batch inference deployments in the workspace. + + Lists Batch inference deployments in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult + or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Batch Inference deployment (asynchronous). + + Delete Batch Inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. + :type endpoint_name: str + :param deployment_name: Inference deployment identifier. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
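The polling keywords described above follow the usual azure-core long-running-operation pattern. A short sketch of begin_delete with an explicit polling interval, assuming an already-authenticated async client (see the earlier sketch); all names are placeholders:

from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def delete_batch_deployment(client: AzureMachineLearningWorkspaces) -> None:
    # `client` is an authenticated async client; names are placeholders.
    poller = await client.batch_deployments.begin_delete(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<batch-endpoint>",
        deployment_name="<deployment>",
        polling_interval=5,  # fallback wait between polls when no Retry-After header is returned
    )
    await poller.wait()  # returns once the long-running delete has finished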
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> _models.BatchDeployment: + """Gets a batch inference deployment by id. + + Gets a batch inference deployment by id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch deployments. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchDeployment, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + **kwargs: Any + ) -> Optional[_models.BatchDeployment]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.BatchDeployment]] + + _json = self._serialize.body(body, 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. + :type deployment_name: str + :param body: Batch inference deployment definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + **kwargs: Any + ) -> _models.BatchDeployment: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + + _json = self._serialize.body(body, 'BatchDeployment') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + 
json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. + :type deployment_name: str + :param body: Batch inference deployment definition object. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
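For orientation, a sketch of driving begin_create_or_update to completion. The BatchDeployment body is assumed to be built elsewhere, and the resource names are placeholders:

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def upsert_batch_deployment(
    client: AzureMachineLearningWorkspaces, body: models.BatchDeployment
) -> models.BatchDeployment:
    # `body` is a fully populated BatchDeployment definition built elsewhere;
    # resource names are placeholders.
    poller = await client.batch_deployments.begin_create_or_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<batch-endpoint>",
        deployment_name="<deployment>",
        body=body,
    )
    return await poller.result()  # the created or updated BatchDeployment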
+ :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py new file mode 100644 index 0000000000000..e1fa00e49a191 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_batch_endpoints_operations.py @@ -0,0 +1,737 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._batch_endpoints_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_keys_request, build_list_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BatchEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`batch_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.BatchEndpointTrackedResourceArmPaginatedResult]: + """Lists Batch inference endpoint in the workspace. + + Lists Batch inference endpoint in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + count=count, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + count=count, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = 
kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Batch Inference Endpoint (asynchronous). + + Delete Batch Inference Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.BatchEndpoint: + """Gets a batch inference endpoint by name. + + Gets a batch inference endpoint by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch Endpoint. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchEndpoint, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> Optional[_models.BatchEndpoint]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.BatchEndpoint]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + 
**kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
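A sketch of the partial-update pattern used by begin_update, assuming PartialMinimalTrackedResourceWithIdentity accepts a tags mapping; the names and tag values are placeholders:

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def retag_batch_endpoint(
    client: AzureMachineLearningWorkspaces,
) -> models.BatchEndpoint:
    # Assumes the partial-update model exposes a `tags` mapping; resource names
    # and tag values are placeholders.
    poller = await client.batch_endpoints.begin_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<batch-endpoint>",
        body=models.PartialMinimalTrackedResourceWithIdentity(tags={"stage": "dev"}),
    )
    return await poller.result()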
+ :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + **kwargs: Any + ) -> _models.BatchEndpoint: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + + _json = self._serialize.body(body, 'BatchEndpoint') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthKeys: + """Lists batch Inference Endpoint keys. + + Lists batch Inference Endpoint keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. 
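A sketch of retrieving the endpoint's auth keys with the list_keys operation documented here; the client is assumed to be authenticated and the names are placeholders:

from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def fetch_endpoint_keys(
    client: AzureMachineLearningWorkspaces,
) -> models.EndpointAuthKeys:
    # Resource names are placeholders; the EndpointAuthKeys result is assumed to
    # hold the primary and secondary key values.
    return await client.batch_endpoints.list_keys(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<batch-endpoint>",
    )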
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthKeys] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py new file mode 100644 index 0000000000000..1005fd62049e7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_containers_operations.py @@ -0,0 +1,360 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._code_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.CodeContainerResourceArmPaginatedResult]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainer] + + _json = self._serialize.body(body, 'CodeContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py new file mode 100644 index 0000000000000..bb3d1263b4cc8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_code_versions_operations.py @@ -0,0 +1,387 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._code_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`code_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.CodeVersionResourceArmPaginatedResult]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.CodeVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersion] + + _json = self._serialize.body(body, 'CodeVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py new file mode 100644 index 0000000000000..597be77fd2054 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_containers_operations.py @@ -0,0 +1,366 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._component_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.ComponentContainerResourceArmPaginatedResult]: + """List component containers. + + List component containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param name: Container name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainer] + + _json = self._serialize.body(body, 'ComponentContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py new file mode 100644 index 0000000000000..e2d5366d9c1f3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_component_versions_operations.py @@ -0,0 +1,393 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._component_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`component_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.ComponentVersionResourceArmPaginatedResult]: + """List component versions. + + List component versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Component name. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersion] + + _json = self._serialize.body(body, 'ComponentVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py new file mode 100644 index 0000000000000..607adb24b4ff6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_compute_operations.py @@ -0,0 +1,1336 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._compute_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_keys_request, build_list_nodes_request, build_list_request, build_restart_request_initial, build_start_request_initial, build_stop_request_initial, build_update_custom_services_request, build_update_idle_shutdown_setting_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ComputeOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`compute` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.PaginatedComputeResourcesList]: + """Gets computes in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedComputeResourcesList or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PaginatedComputeResourcesList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> _models.ComputeResource: + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + _json = self._serialize.body(parameters, 'ComputeResource') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = 
pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if response.status_code == 201: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + _json = self._serialize.body(parameters, 'ClusterUpdateParameters') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: Additional parameters for cluster update. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + underlying_resource_action=underlying_resource_action, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # 
type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes specified Machine Learning compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. + :type underlying_resource_action: str or + ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @distributed_trace_async + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param custom_services: New list of Custom Services. 
+ :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(custom_services, '[CustomService]') + + request = build_update_custom_services_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update_custom_services.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore + + + @distributed_trace + def list_nodes( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.AmlComputeNodesInformation]: + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AmlComputeNodesInformation or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.AmlComputeNodesInformation] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_nodes_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.list_nodes.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_nodes_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) + list_of_elem = deserialized.nodes + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list_nodes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> _models.ComputeSecrets: + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeSecrets] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore + + + async def _start_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._start_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + 
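The compute operations above all follow the same async management-plane shape: plain awaits for point reads such as get and list_keys, AsyncItemPaged for list and list_nodes, and AsyncLROPoller for the begin_* calls. The following is a minimal sketch under those assumptions, with placeholder resource names and a `models.ComputeResource` payload built elsewhere; it is illustrative only, not part of the generated module.

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces
from azure.mgmt.machinelearningservices import models

async def roll_out_compute(parameters: models.ComputeResource) -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # begin_create_or_update returns an AsyncLROPoller; result() yields the final ComputeResource.
            poller = await client.compute.begin_create_or_update(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                compute_name="<compute>",
                parameters=parameters,
            )
            compute = await poller.result()
            print(compute.name)

            # list_nodes pages asynchronously; each item comes from the "nodes" array of the response.
            async for node in client.compute.list_nodes("<resource-group>", "<workspace>", "<compute>"):
                print(node)

            # Secrets are only returned by the dedicated listKeys call, never by get.
            secrets = await client.compute.list_keys("<resource-group>", "<workspace>", "<compute>")
            print(type(secrets).__name__)

Per the polling keyword documented in the begin_* docstrings, passing polling=False makes the call return after the initial request without waiting for the long-running operation to finish.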
_start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + + @distributed_trace_async + async def begin_start( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a start action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._start_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + async def _stop_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._stop_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + + @distributed_trace_async + async def begin_stop( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a stop action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + async def _restart_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_restart_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._restart_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _restart_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + + @distributed_trace_async + async def begin_restart( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Posts a restart action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @distributed_trace_async + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: 
_models.IdleShutdownSetting, + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(parameters, 'IdleShutdownSetting') + + request = build_update_idle_shutdown_setting_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update_idle_shutdown_setting.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py new file mode 100644 index 0000000000000..71d108a5d2a4b --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_containers_operations.py @@ -0,0 +1,366 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._data_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`data_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.DataContainerResourceArmPaginatedResult]: + """List data containers. + + List data containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.DataContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainer] + + _json = self._serialize.body(body, 'DataContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py new file mode 100644 index 0000000000000..5368f1f794dd5 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_data_versions_operations.py @@ -0,0 +1,403 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
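Illustrative only, not part of the generated diff: a minimal sketch of how the async DataContainersOperations class above might be driven through the client. It assumes the azure-identity package for DefaultAzureCredential, and the subscription, resource group, workspace, and container names are placeholders; the list() and get() calls follow the signatures in this file.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def show_data_containers() -> None:
    # Placeholder identifiers -- replace with real values.
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged; iterate with `async for`.
            async for container in client.data_containers.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
            ):
                print(container.name)

            # get() resolves a single container by name.
            container = await client.data_containers.get(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<data-container-name>",
            )
            print(container.id)


asyncio.run(show_data_containers())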
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._data_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`data_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.DataVersionBaseResourceArmPaginatedResult]: + """List data versions in the data container. + + List data versions in the data container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Data container's name. + :type name: str + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. + :type order_by: str + :param top: Top count of results, top count cannot be greater than the page size. + If topCount > page size, results with be default page size count + will be returned. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, + ListViewType.All]View type for including/excluding (for example) archived entities. Default + value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.DataVersionBase: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBase] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBase] + + _json = self._serialize.body(body, 'DataVersionBase') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py new file mode 100644 index 0000000000000..fe29e8d62e923 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_datastores_operations.py @@ -0,0 +1,464 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
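Again outside the diff itself: a hedged sketch of calling the async DataVersionsOperations defined above. The order_by, top, and version values simply mirror the parameters documented in this file; credential setup via azure-identity and all resource names are assumptions.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def show_data_versions() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # Page through the versions of one data container, ordered by creation time.
            async for version in client.data_versions.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<data-container-name>",
                order_by="createdtime",
                top=10,
            ):
                print(version.name)

            # Fetch a single version by its identifier.
            version = await client.data_versions.get(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<data-container-name>",
                version="1",
            )
            print(version.id)


asyncio.run(show_data_versions())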
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._datastores_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request, build_list_secrets_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DatastoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`datastores` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: Optional[bool] = False, + **kwargs: Any + ) -> AsyncIterable[_models.DatastoreResourceArmPaginatedResult]: + """List datastores. + + List datastores. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is 30. + :type count: int + :param is_default: Filter down to the workspace default datastore. Default value is None. + :type is_default: bool + :param names: Names of datastores to return. Default value is None. + :type names: list[str] + :param search_text: Text to search for in the datastore names. Default value is None. + :type search_text: str + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. + :type order_by: str + :param order_by_asc: Order by property in ascending order. Default value is False. 
+ :type order_by_asc: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DatastoreResourceArmPaginatedResult or the result + of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DatastoreResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DatastoreResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete datastore. + + Delete datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.Datastore: + """Get datastore. + + Get datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Datastore] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Datastore', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: Optional[bool] = False, + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. + :type name: str + :param body: Datastore entity to create or update. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. 
+ :type skip_validation: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Datastore] + + _json = self._serialize.body(body, 'Datastore') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + skip_validation=skip_validation, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Datastore', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Datastore', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace_async + async def list_secrets( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.DatastoreSecrets: + """Get datastore secrets. + + Get datastore secrets. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatastoreSecrets, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DatastoreSecrets] + + + request = build_list_secrets_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.list_secrets.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DatastoreSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py new file mode 100644 index 0000000000000..987d60460bc7c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_containers_operations.py @@ -0,0 +1,366 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
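A usage sketch (not generated code) for the async DatastoresOperations above: list datastores filtered to the workspace default, then call the listSecrets action on one of them. DefaultAzureCredential comes from azure-identity, and every name below is a placeholder.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def show_datastores() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # is_default=True narrows the listing to the workspace default datastore.
            async for datastore in client.datastores.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                is_default=True,
            ):
                print(datastore.name)

            # list_secrets() returns a DatastoreSecrets model; treat the result as sensitive.
            secrets = await client.datastores.list_secrets(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<datastore-name>",
            )
            print(type(secrets).__name__)


asyncio.run(show_datastores())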
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._environment_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class EnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.EnvironmentContainerResourceArmPaginatedResult]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainer] + + _json = self._serialize.body(body, 'EnvironmentContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py new file mode 100644 index 0000000000000..d979765a4fe87 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_environment_versions_operations.py @@ -0,0 +1,393 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
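One more illustrative sketch, separate from the diff: exercising the async EnvironmentContainersOperations above. The "ActiveOnly" filter string mirrors the ListViewType values mentioned in these docstrings but is an assumption here, as are the credential setup and resource names.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def show_environment_containers() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # Restrict the listing to non-archived environment containers.
            async for environment in client.environment_containers.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                list_view_type="ActiveOnly",
            ):
                print(environment.name)

            # Container names are case-sensitive, per the docstrings above.
            environment = await client.environment_containers.get(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<environment-name>",
            )
            print(environment.id)


asyncio.run(show_environment_containers())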
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._environment_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.EnvironmentVersionResourceArmPaginatedResult]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. + :type name: str + :param version: Version of EnvironmentVersion. + :type version: str + :param body: Definition of EnvironmentVersion. 
+ :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersion] + + _json = self._serialize.body(body, 'EnvironmentVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py new file mode 100644 index 0000000000000..64b04d8e7603a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_jobs_operations.py @@ -0,0 +1,591 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
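A sketch of how the `EnvironmentVersionsOperations` methods defined above might be exercised from the async client. The workspace coordinates are assumptions; `list` returns an `AsyncItemPaged`, so it is consumed with `async for`, while `get` is awaited directly.

```python
# Hypothetical sketch: page through environment versions and fetch one by name.
# Only operations defined in the generated class above are used; names are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # list() returns an AsyncItemPaged, iterated with `async for`.
            async for version in client.environment_versions.list(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                name="my-environment",
                top=10,
                list_view_type="ActiveOnly",  # ListViewType accepts a string or the enum
            ):
                print(version.name)

            # Fetch a single version; both `name` and `version` are case-sensitive.
            latest = await client.environment_versions.get(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                name="my-environment",
                version="1",
            )
            print(latest.id)


asyncio.run(main())
```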
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._jobs_operations import build_cancel_request_initial, build_create_or_update_request, build_delete_request_initial, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class JobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.JobBaseResourceArmPaginatedResult]: + """Lists Jobs in the workspace. + + Lists Jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param job_type: Type of job to be returned. Default value is None. + :type job_type: str + :param tag: Jobs returned will have this tag key. Default value is None. + :type tag: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param scheduled: Indicator whether the job is scheduled job. Default value is None. + :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. 
+ :type schedule_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.JobBaseResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", 
{}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Deletes a Job (asynchronous). + + Deletes a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> _models.JobBase: + """Gets a Job by name/id. + + Gets a Job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBase] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.JobBase, + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :param body: Job definition object. 
+ :type body: ~azure.mgmt.machinelearningservices.models.JobBase + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBase] + + _json = self._serialize.body(body, 'JobBase') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('JobBase', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('JobBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + async def _cancel_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_cancel_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._cancel_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + 
stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _cancel_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore + + + @distributed_trace_async + async def begin_cancel( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Cancels a Job (asynchronous). + + Cancels a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._cancel_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py new file mode 100644 index 0000000000000..137335231aea4 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_labeling_jobs_operations.py @@ -0,0 +1,806 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
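The `JobsOperations` class above mixes paged listing with long-running operations: `begin_cancel` and `begin_delete` return an `AsyncLROPoller` whose result is awaited separately. A hedged usage sketch follows; the resource names and the `job_type`/`tag` filter values are assumptions for illustration.

```python
# Hypothetical sketch: list jobs with filters, then cancel and delete one job
# via the long-running-operation pollers defined above. Names are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            async for job in client.jobs.list(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                job_type="Command",   # assumed job-type filter value
                tag="experiment",     # returned jobs carry this tag key
            ):
                print(job.name)

            # Cancel, then delete, a job by its name/identifier (both are LROs).
            cancel_poller = await client.jobs.begin_cancel(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                id="my-job",
            )
            await cancel_poller.result()

            delete_poller = await client.jobs.begin_delete(
                resource_group_name="my-rg",
                workspace_name="my-workspace",
                id="my-job",
            )
            await delete_poller.result()


asyncio.run(main())
```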
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._labeling_jobs_operations import build_create_or_update_request_initial, build_delete_request, build_export_labels_request_initial, build_get_request, build_list_request, build_pause_request, build_resume_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`labeling_jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable[_models.LabelingJobResourceArmPaginatedResult]: + """Lists labeling jobs in the workspace. + + Lists labeling jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Number of labeling jobs to return. Default value is None. 
+ :type count: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJobResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJobResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + include_job_instructions: Optional[bool] = False, + include_label_categories: Optional[bool] = False, + **kwargs: Any + ) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param include_job_instructions: Boolean value to indicate whether to include JobInstructions + in response. Default value is False. + :type include_job_instructions: bool + :param include_label_categories: Boolean value to indicate Whether to include LabelCategories + in response. Default value is False. 
+ :type include_label_categories: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + include_job_instructions=include_job_instructions, + include_label_categories=include_label_categories, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + + _json = self._serialize.body(body, 'LabelingJob') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + 
response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param body: LabelingJob definition object. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LabelingJob', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + async def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.ExportSummary]] + + _json = self._serialize.body(body, 'ExportSummary') + + request = build_export_labels_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._export_labels_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ExportSummary', pipeline_response) + + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + + @distributed_trace_async + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param body: The export summary. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExportSummary] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._export_labels_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ExportSummary', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_export_labels.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @distributed_trace_async + async def pause( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_pause_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.pause.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + pause.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore + + + async def _resume_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_resume_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._resume_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) 
+ + _resume_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + + + @distributed_trace_async + async def begin_resume( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._resume_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resume.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py 
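Reviewer note (not part of the generated diff): a minimal usage sketch of the new async labeling-job operations added above. It assumes the aio client exposes this operation group as a `labeling_jobs` attribute (an assumption; check the generated client), uses `DefaultAzureCredential` from the separate azure-identity package, and fills in placeholder subscription, resource group, workspace, and job values. `begin_export_labels` additionally takes an `ExportSummary` body describing the export format, and the `polling` keyword documented above can be set to `False` or to a custom `AsyncPollingMethod`.

    # Hedged usage sketch; names marked <...> are placeholders supplied by the caller.
    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


    async def main() -> None:
        async with DefaultAzureCredential() as credential:
            async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
                # Pause the labeling job; returns None on success (HTTP 200).
                # `labeling_jobs` as the attribute name is an assumption here.
                await client.labeling_jobs.pause(
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    id="<labeling-job-id>",
                )

                # Resume it again; this is a long-running operation, so await the poller's result.
                poller = await client.labeling_jobs.begin_resume(
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    id="<labeling-job-id>",
                )
                await poller.result()


    asyncio.run(main())
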
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py deleted file mode 100644 index 3efc5f75d467c..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_machine_learning_compute_operations.py +++ /dev/null @@ -1,895 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class MachineLearningComputeOperations: - """MachineLearningComputeOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_workspace( - self, - resource_group_name: str, - workspace_name: str, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.PaginatedComputeResourcesList"]: - """Gets computes in specified workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param skiptoken: Continuation token for pagination. 
- :type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore - - async def get( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.ComputeResource": - """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are - not returned - use 'keys' nested resource to get them. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ComputeResource", - **kwargs - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", 
api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ComputeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_create_or_update( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ComputeResource", - **kwargs - ) -> AsyncLROPoller["_models.ComputeResource"]: - """Creates or updates compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify - that it does not exist yet. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._create_or_update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _update_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ClusterUpdateParameters", - **kwargs - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - 
header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_update( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - parameters: "_models.ClusterUpdateParameters", - **kwargs - ) -> AsyncLROPoller["_models.ComputeResource"]: - """Updates properties of a compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def _delete_initial( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') - - 
# Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - - if cls: - return cls(pipeline_response, None, response_headers) - - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def begin_delete( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], - **kwargs - ) -> AsyncLROPoller[None]: - """Deletes specified Machine Learning compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. - :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - underlying_resource_action=underlying_resource_action, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - async def list_nodes( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.AmlComputeNodesInformation": - """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AmlComputeNodesInformation, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_nodes.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore - - async def list_keys( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> "_models.ComputeSecrets": - """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeSecrets', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore - - async def start( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a start action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.start.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore - - async def stop( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a stop action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.stop.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore - - async def restart( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs - ) -> None: - """Posts a restart action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.restart.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py new file mode 100644 index 0000000000000..6947d27a80302 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_containers_operations.py @@ -0,0 +1,371 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._model_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.ModelContainerResourceArmPaginatedResult]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is None. + :type count: int + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainer] + + _json = self._serialize.body(body, 'ModelContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py new file mode 100644 index 0000000000000..62e56050ad04a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_model_versions_operations.py @@ -0,0 +1,425 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
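For orientation, a minimal usage sketch of the asynchronous model container operations generated here. It is illustrative only: the resource names are placeholders, DefaultAzureCredential comes from the separate azure-identity package, and it assumes the aio client exposes this operation group on a model_containers attribute, mirroring the model_versions attribute documented below.

# Hypothetical usage sketch; not part of the generated code.
import asyncio

from azure.identity.aio import DefaultAzureCredential  # assumption: azure-identity is installed
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def show_model_containers() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, "<subscription-id>") as client:
            # list() is a plain method returning an AsyncItemPaged; iterate with async for.
            async for container in client.model_containers.list(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
            ):
                print(container.name)

            # get() is a coroutine and must be awaited.
            container = await client.model_containers.get(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                name="<model-name>",
            )
            print(container.id)


asyncio.run(show_model_containers())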
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._model_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`model_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.ModelVersionResourceArmPaginatedResult]: + """List model versions. + + List model versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Model name. This is case-sensitive. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Model version. Default value is None. + :type version: str + :param description: Model description. Default value is None. + :type description: str + :param offset: Number of initial results to skip. Default value is None. + :type offset: int + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. 
+ :type properties: str + :param feed: Name of the feed. Default value is None. + :type feed: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersion] + + _json = self._serialize.body(body, 'ModelVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py deleted file mode 100644 index b1dac0e119beb..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_notebooks_operations.py +++ /dev/null @@ -1,160 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
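As a rough sketch of how the model version operations above might be driven, assuming a client constructed as in the previous sketch and placeholder resource names; the top and list_view_type arguments map onto the query parameters documented above.

# Hypothetical usage sketch; not part of the generated code.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def walk_model_versions(client: AzureMachineLearningWorkspaces) -> None:
    # List versions of one model, capped at ten records and excluding archived entities.
    async for version in client.model_versions.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="<model-name>",
        top=10,
        list_view_type="ActiveOnly",  # value from the ListViewType enum
    ):
        print(version.name)

    # Fetch one specific version, then delete it.
    found = await client.model_versions.get(
        "<resource-group>", "<workspace>", "<model-name>", "<version>"
    )
    print(found.id)
    await client.model_versions.delete(
        "<resource-group>", "<workspace>", "<model-name>", "<version>"
    )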
-# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling - -from ... import models as _models - -T = TypeVar('T') -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - -class NotebooksOperations: - """NotebooksOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - async def _prepare_initial( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> Optional["_models.NotebookResourceInfo"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._prepare_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - 
_prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore - - async def begin_prepare( - self, - resource_group_name: str, - workspace_name: str, - **kwargs - ) -> AsyncLROPoller["_models.NotebookResourceInfo"]: - """prepare. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._prepare_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py new 
file mode 100644 index 0000000000000..5a1bb491ef86e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_deployments_operations.py @@ -0,0 +1,892 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._online_deployments_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_logs_request, build_get_request, build_list_request, build_list_skus_request, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`online_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.OnlineDeploymentTrackedResourceArmPaginatedResult]: + """List Inference Endpoint Deployments. + + List Inference Endpoint Deployments. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. 
+ :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult + or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + 
error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Inference Endpoint Deployment (asynchronous). + + Delete Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
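A short sketch of driving this long-running delete from the async client, with placeholder names and a client constructed as in the earlier sketches: awaiting begin_delete starts the operation and returns an AsyncLROPoller, and awaiting the poller's result() waits for the service to finish.

# Hypothetical usage sketch; not part of the generated code.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def delete_deployment(client: AzureMachineLearningWorkspaces) -> None:
    poller = await client.online_deployments.begin_delete(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        endpoint_name="<endpoint-name>",
        deployment_name="<deployment-name>",
    )
    # Block until the ARM long-running operation completes (delete returns no body).
    await poller.result()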
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> _models.OnlineDeployment: + """Get Inference Deployment Deployment. + + Get Inference Deployment Deployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. 
+ :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineDeployment, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + **kwargs: Any + ) -> Optional[_models.OnlineDeployment]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.OnlineDeployment]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSku') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + 
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + **kwargs: Any + ) -> _models.OnlineDeployment: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + + _json = self._serialize.body(body, 'OnlineDeployment') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + 
content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace_async + async def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.DeploymentLogsRequest, + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DeploymentLogs] + + _json = self._serialize.body(body, 'DeploymentLogsRequest') + + request = build_get_logs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.get_logs.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DeploymentLogs', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_logs.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore + + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.SkuResourceArmPaginatedResult]: + """List Inference Endpoint Deployment Skus. + + List Inference Endpoint Deployment Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResourceArmPaginatedResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SkuResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + count=count, + skip=skip, + template_url=self.list_skus.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_skus_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + count=count, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list_skus.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py new file mode 100644 index 0000000000000..a4a2fdc42b3f0 --- /dev/null +++ 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_online_endpoints_operations.py @@ -0,0 +1,981 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._online_endpoints_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_get_token_request, build_list_keys_request, build_list_request, build_regenerate_keys_request_initial, build_update_request_initial +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`online_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, "_models.EndpointComputeType"]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, "_models.OrderString"]] = None, + **kwargs: Any + ) -> AsyncIterable[_models.OnlineEndpointTrackedResourceArmPaginatedResult]: + """List Online Endpoints. + + List Online Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Name of the endpoint. Default value is None. 
+ :type name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param compute_type: EndpointComputeType to be filtered by. Default value is None. + :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. + :type properties: str + :param order_by: The option to order the response. Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) 
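+            # The prepared request (either the first page or the absolute next_link URL) has just
+            # been sent through the ARM pipeline; the response is validated below and, on HTTP 200,
+            # handed to extract_data, which deserializes the page into
+            # OnlineEndpointTrackedResourceArmPaginatedResult and yields its next_link for paging.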
+ response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete Online Endpoint (asynchronous). + + Delete Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.OnlineEndpoint: + """Get Online Endpoint. + + Get Online Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineEndpoint, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> Optional[_models.OnlineEndpoint]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.OnlineEndpoint]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, 
+ **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + **kwargs: Any + ) -> _models.OnlineEndpoint: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + + _json = self._serialize.body(body, 'OnlineEndpoint') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace_async + async def list_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthKeys] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore + + + async def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + + request = build_regenerate_keys_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._regenerate_keys_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: 
disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + + @distributed_trace_async + async def begin_regenerate_keys( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: RegenerateKeys request . + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._regenerate_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @distributed_trace_async + async def get_token( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthToken: + """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthToken, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthToken] + + + request = build_get_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get_token.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthToken', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py index 6b2ed5f29d08d..0645d754f7016 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,82 +6,94 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class Operations: - """Operations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`operations` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, - **kwargs - ) -> AsyncIterable["_models.OperationListResult"]: + **kwargs: Any + ) -> AsyncIterable[_models.AmlOperationListResult]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. 
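+        A minimal usage sketch, assuming an already-authenticated async client named
+        ``client`` (the variable name is illustrative, not part of the generated code):
+
+        .. code-block:: python
+
+            # Iterate every available REST API operation exposed by the provider.
+            async for operation in client.operations.list():
+                print(operation.name)
+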
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OperationListResult] + :return: An iterator like instance of either AmlOperationListResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.AmlOperationListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("AmlOperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -89,17 +102,22 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list.metadata = {'url': 
'/providers/Microsoft.MachineLearningServices/operations'} # type: ignore + list.metadata = {'url': "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py new file mode 100644 index 0000000000000..0ad201a8c586e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py index 5ef7c0591ff1a..e8fe93d6bc300 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,53 +6,146 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest -from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._private_endpoint_connections_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class PrivateEndpointConnectionsOperations: - """PrivateEndpointConnectionsOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> AsyncIterable[_models.PrivateEndpointConnectionListResult]: + """List all the private endpoint connections associated with the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result + of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionListResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + + @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - **kwargs - ) -> "_models.PrivateEndpointConnection": + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Gets the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. 
:type workspace_name: str @@ -63,39 +157,41 @@ async def get( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnection] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) @@ -104,19 +200,22 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - async def put( + get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + + @distributed_trace_async + async def create_or_update( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - properties: "_models.PrivateEndpointConnection", - **kwargs - ) -> "_models.PrivateEndpointConnection": + properties: _models.PrivateEndpointConnection, + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Update the state of specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -130,44 +229,45 @@ async def put( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.put.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(properties, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnection] + + _json = self._serialize.body(properties, 'PrivateEndpointConnection') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + 
resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) @@ -176,65 +276,21 @@ async def put( return cls(pipeline_response, deserialized, {}) return deserialized - put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - async def _delete_initial( - self, - resource_group_name: str, - workspace_name: str, - private_endpoint_connection_name: str, - **kwargs - ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: 
ignore - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - async def begin_delete( + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - **kwargs - ) -> AsyncLROPoller[None]: + **kwargs: Any + ) -> None: """Deletes the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -242,55 +298,49 @@ async def begin_delete( with the workspace. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = await self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - cls=lambda x,y,z: x, - **kwargs - ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - def get_long_running_output(pipeline_response): - if cls: - return 
cls(pipeline_response, None, {}) + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling - if cont_token: - return AsyncLROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py index 882726b29838c..4c5e0ffad87f0 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_private_link_resources_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,50 +6,52 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, Callable, Dict, Optional, TypeVar from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._private_link_resources_operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class PrivateLinkResourcesOperations: - """PrivateLinkResourcesOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`private_link_resources` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def list_by_workspace( + + @distributed_trace_async + async def list( self, resource_group_name: str, workspace_name: str, - **kwargs - ) -> "_models.PrivateLinkResourceListResult": + **kwargs: Any + ) -> _models.PrivateLinkResourceListResult: """Gets the private link resources that need to be created for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. 
:type workspace_name: str @@ -57,38 +60,41 @@ async def list_by_workspace( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateLinkResourceListResult] + + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) @@ -96,4 +102,6 @@ async def list_by_workspace( return cls(pipeline_response, deserialized, {}) return deserialized - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py 
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py index 6bdae569077ff..0db7d7d32dba1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_quotas_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,48 +6,51 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._quotas_operations import build_list_request, build_update_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class QuotasOperations: - """QuotasOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`quotas` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async async def update( self, location: str, - parameters: "_models.QuotaUpdateParameters", - **kwargs - ) -> "_models.UpdateWorkspaceQuotasResult": + parameters: _models.QuotaUpdateParameters, + **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. :param location: The location for update quota is queried. @@ -58,42 +62,43 @@ async def update( :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'QuotaUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.UpdateWorkspaceQuotasResult] + + _json = self._serialize.body(parameters, 'QuotaUpdateParameters') + + request = build_update_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) @@ -102,56 +107,67 @@ async def update( return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore + update.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore + + + @distributed_trace def list( self, location: str, - **kwargs - ) -> AsyncIterable["_models.ListWorkspaceQuotas"]: + **kwargs: Any + ) -> AsyncIterable[_models.ListWorkspaceQuotas]: """Gets the currently assigned Workspace Quotas based on VMFamily. :param location: The location for which resource usage is queried. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListWorkspaceQuotas] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=self.list.metadata['url'], + 
headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -160,16 +176,22 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py new file mode 100644 index 0000000000000..0bff7efa4fcd9 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_schedules_operations.py @@ -0,0 +1,504 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._schedules_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SchedulesOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s
+        :attr:`schedules` attribute.
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+
+    @distributed_trace
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        skip: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable[_models.ScheduleResourceArmPaginatedResult]:
+        """List schedules in specified workspace.
+
+        List schedules in specified workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param skip: Continuation token for pagination. Default value is None.
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ScheduleResourceArmPaginatedResult or the result + of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ScheduleResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ScheduleResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + 
subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Delete schedule. + + Delete schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Schedule name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview"))  # type: str
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_initial(  # type: ignore
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                name=name,
+                api_version=api_version,
+                cls=lambda x,y,z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+
+        if polling is True:
+            polling_method = cast(AsyncPollingMethod, AsyncARMPolling(
+                lro_delay,
+
+
+                **kwargs
+            ))  # type: AsyncPollingMethod
+        elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"}  # type: ignore
+
+    @distributed_trace_async
+    async def get(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        **kwargs: Any
+    ) -> _models.Schedule:
+        """Get schedule.
+
+        Get schedule.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param name: Schedule name.
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Schedule, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Schedule + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Schedule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + **kwargs: Any + ) -> _models.Schedule: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + + _json = self._serialize.body(body, 'Schedule') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('Schedule', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('Schedule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + **kwargs: Any + ) -> AsyncLROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Schedule name. + :type name: str + :param body: Schedule definition. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Schedule or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Schedule', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py index b5a6eb14f798e..ba4a76057f890 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_usages_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,47 +6,49 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._usages_operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class UsagesOperations: - """UsagesOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`usages` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, location: str, - **kwargs - ) -> AsyncIterable["_models.ListUsagesResult"]: + **kwargs: Any + ) -> AsyncIterable[_models.ListUsagesResult]: """Gets the current usage information as well as limits for AML resources for given subscription and location. 
@@ -53,43 +56,51 @@ def list( :type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ListUsagesResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListUsagesResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListUsagesResult', pipeline_response) + deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -98,16 +109,22 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py index 109eba1351e8c..2f59914dcafc1 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,96 +6,91 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, Callable, Dict, Optional, TypeVar from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._virtual_machine_sizes_operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class VirtualMachineSizesOperations: - """VirtualMachineSizesOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`virtual_machine_sizes` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async async def list( self, location: str, - compute_type: Optional[str] = None, - recommended: Optional[bool] = None, - **kwargs - ) -> "_models.VirtualMachineSizeListResult": + **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. :param location: The location upon which virtual-machine-sizes is queried. :type location: str - :param compute_type: Type of compute to filter by. - :type compute_type: str - :param recommended: Specifies whether to return recommended vm sizes or all vm sizes. - :type recommended: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualMachineSizeListResult, or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if compute_type is not None: - query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str') - if recommended is not None: - query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.VirtualMachineSizeListResult] + + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore 
+ + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) @@ -102,4 +98,6 @@ async def list( return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore + + list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py index c012377414782..f5267e82ff530 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,317 +6,349 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._workspace_connections_operations import build_create_request, build_delete_request, build_get_request, build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class WorkspaceConnectionsOperations: - """WorkspaceConnectionsOperations async operations. - - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( + + @distributed_trace_async + async def create( self, resource_group_name: str, workspace_name: str, - target: Optional[str] = None, - category: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.PaginatedWorkspaceConnectionsList"]: - """List all connections under a AML workspace. + connection_name: str, + parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param target: Target of the workspace connection. - :type target: str - :param category: Category of the workspace connection. - :type category: str + :param connection_name: Friendly name of the workspace connection. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. 
+ :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedWorkspaceConnectionsList] + :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedWorkspaceConnectionsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + _json = self._serialize.body(parameters, 'WorkspaceConnectionPropertiesV2BasicResource') + + request = build_create_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if target is not None: - query_parameters['target'] = self._serialize.query("target", target, 'str') - if category is not None: - query_parameters['category'] = self._serialize.query("category", category, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - async def extract_data(pipeline_response): - 
deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return None, AsyncList(list_of_elem) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - async def get_next(next_link=None): - request = prepare_request(next_link) + deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + if cls: + return cls(pipeline_response, deserialized, {}) - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + return deserialized - return pipeline_response + create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore - async def create( + @distributed_trace_async + async def get( self, resource_group_name: str, workspace_name: str, connection_name: str, - parameters: "_models.WorkspaceConnectionDto", - **kwargs - ) -> "_models.WorkspaceConnection": - """Add a new workspace connection. + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """get. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param connection_name: Friendly name of the workspace connection. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. 
- :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionDto :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore - async def get( + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + + @distributed_trace_async + async def delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, connection_name: str, - **kwargs - ) -> "_models.WorkspaceConnection": - """Get the detail of a workspace connection. + **kwargs: Any + ) -> None: + """delete. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param connection_name: Friendly name of the workspace connection. :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + :return: None, or the result of cls(response) + :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + 
connection_name=connection_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore - async def delete( + @distributed_trace + def list( self, resource_group_name: str, workspace_name: str, - connection_name: str, - **kwargs - ) -> None: - """Delete a workspace connection. + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult]: + """list. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. - :type connection_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. 
+ :type category: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None + :return: An iterator like instance of either + WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult or the result of cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[None] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + target=target, + category=category, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + target=target, + category=category, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + async def extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + async def get_next(next_link=None): + request = prepare_request(next_link) - if 
response.status_code not in [200, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - if cls: - return cls(pipeline_response, None, {}) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + return pipeline_response + + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py index 019e736e719f1..5786f613a2f28 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspace_features_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,94 +6,106 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - +from ..._vendor import _convert_request +from ...operations._workspace_features_operations import build_list_request T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class WorkspaceFeaturesOperations: - """WorkspaceFeaturesOperations async operations. - - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`workspace_features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, resource_group_name: str, workspace_name: str, - **kwargs - ) -> AsyncIterable["_models.ListAmlUserFeatureResult"]: + **kwargs: Any + ) -> AsyncIterable[_models.ListAmlUserFeatureResult]: """Lists all enabled features for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] + :return: An iterator like instance of either ListAmlUserFeatureResult or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListAmlUserFeatureResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -101,17 +114,22 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py index 75d2153d645f5..3748667287ecc 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/aio/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,53 +6,56 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - +from ..._vendor import _convert_request +from ...operations._workspaces_operations import build_create_or_update_request_initial, build_delete_request_initial, build_diagnose_request_initial, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_keys_request, build_list_notebook_access_token_request, build_list_notebook_keys_request, build_list_outbound_network_dependencies_endpoints_request, build_list_storage_account_keys_request, build_prepare_notebook_request_initial, build_resync_keys_request_initial, build_update_request_initial T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class WorkspacesOperations: - """WorkspacesOperations async operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningWorkspaces`'s + :attr:`workspaces` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async async def get( self, resource_group_name: str, workspace_name: str, - **kwargs - ) -> "_models.Workspace": + **kwargs: Any + ) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. 
:type workspace_name: str @@ -60,38 +64,40 @@ async def get( :rtype: ~azure.mgmt.machinelearningservices.models.Workspace :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Workspace', pipeline_response) @@ -100,77 +106,79 @@ async def get( return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, - parameters: "_models.Workspace", - **kwargs - ) -> Optional["_models.Workspace"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + parameters: _models.Workspace, + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Workspace]] + + _json = self._serialize.body(parameters, 'Workspace') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Workspace', pipeline_response) - if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + + @distributed_trace_async async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: "_models.Workspace", - **kwargs - ) -> AsyncLROPoller["_models.Workspace"]: + parameters: _models.Workspace, + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -178,48 +186,59 @@ async def begin_create_or_update( :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
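# --- Editorial usage sketch (not part of the generated diff) ---
# How a caller might drive the begin_create_or_update poller documented above.
# Building a full models.Workspace payload (location, storage account, key
# vault, etc.) is out of scope here, so the parameters object is passed in;
# `client` is assumed to be the async client from the earlier sketch.
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def create_workspace(
    client: AzureMachineLearningWorkspaces,
    parameters: models.Workspace,
) -> models.Workspace:
    poller = await client.workspaces.begin_create_or_update(
        "my-resource-group", "my-workspace", parameters
    )
    # result() resumes once the long-running provisioning operation completes.
    return await poller.result()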
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises: ~azure.core.exceptions.HttpResponseError """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = await self._create_or_update_initial( + raw_result = await self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, + api_version=api_version, + content_type=content_type, cls=lambda x,y,z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Workspace', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( @@ -228,107 +247,118 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - async def _delete_initial( + async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', 
{})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - async def begin_delete( + + @distributed_trace_async + async def begin_delete( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
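# --- Editorial usage sketch (not part of the generated diff) ---
# Awaiting the delete poller described above; `client` is assumed to be the
# async client from the earlier sketch.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def delete_workspace(client: AzureMachineLearningWorkspaces) -> None:
    poller = await client.workspaces.begin_delete("my-resource-group", "my-workspace")
    await poller.wait()  # returns once the service reports the deletion finished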
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, + api_version=api_version, cls=lambda x,y,z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( @@ -337,130 +367,213 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: 
str, + parameters: _models.WorkspaceUpdateParameters, + **kwargs: Any + ) -> Optional[_models.Workspace]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Workspace]] + + _json = self._serialize.body(parameters, 'WorkspaceUpdateParameters') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) - async def update( + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + + @distributed_trace_async + async def begin_update( self, resource_group_name: str, workspace_name: str, - parameters: "_models.WorkspaceUpdateParameters", - **kwargs - ) -> "_models.Workspace": + parameters: _models.WorkspaceUpdateParameters, + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param parameters: The parameters for updating a machine learning workspace. :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
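# --- Editorial usage sketch (not part of the generated diff) ---
# The PATCH-style update is now exposed as begin_update and returns a poller
# instead of the Workspace directly. Assumes WorkspaceUpdateParameters accepts
# a `tags` keyword, as ARM update-parameter models usually do; `client` is the
# async client from the earlier sketch.
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def retag_workspace(client: AzureMachineLearningWorkspaces) -> models.Workspace:
    poller = await client.workspaces.begin_update(
        "my-resource-group",
        "my-workspace",
        models.WorkspaceUpdateParameters(tags={"env": "dev"}),
    )
    return await poller.result()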
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) - deserialized = self._deserialize('Workspace', pipeline_response) + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', 
pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace def list_by_resource_group( self, resource_group_name: str, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.WorkspaceListResult"]: + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.WorkspaceListResult]: """Lists all the available machine learning workspaces under the specified resource group. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
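# --- Editorial usage sketch (not part of the generated diff) ---
# Consuming the AsyncItemPaged returned by list_by_resource_group; note the
# pagination parameter is now `skip` rather than `skiptoken`. `client` is the
# async client from the earlier sketch.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def print_workspace_names(client: AzureMachineLearningWorkspaces) -> None:
    # The method itself is not awaited; iteration drives the paged HTTP calls.
    async for workspace in client.workspaces.list_by_resource_group("my-resource-group"):
        print(workspace.name)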
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + api_version=api_version, + skip=skip, + template_url=self.list_by_resource_group.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -469,31 +582,189 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + async def _diagnose_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + **kwargs: Any + ) -> Optional[_models.DiagnoseResponseResult]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DiagnoseResponseResult]] + + if parameters is not None: + _json = self._serialize.body(parameters, 'DiagnoseWorkspaceParameters') + else: + _json = None + + request = build_diagnose_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._diagnose_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _diagnose_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + + @distributed_trace_async + async def begin_diagnose( + self, + resource_group_name: 
str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DiagnoseResponseResult] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._diagnose_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_diagnose.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @distributed_trace_async async def list_keys( self, resource_group_name: str, workspace_name: str, - **kwargs - ) -> "_models.ListWorkspaceKeysResult": + **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -502,38 +773,40 @@ async def list_keys( :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListWorkspaceKeysResult] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = 
self._deserialize('ListWorkspaceKeysResult', pipeline_response) @@ -542,114 +815,188 @@ async def list_keys( return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore - async def resync_keys( + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore + + + async def _resync_keys_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_resync_keys_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._resync_keys_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _resync_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + + + @distributed_trace_async + async def begin_resync_keys( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: """Resync all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
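# --- Editorial usage sketch (not part of the generated diff) ---
# resync_keys is now a long-running operation (begin_resync_keys), so the call
# yields a poller to await; `client` is the async client from the earlier sketch.
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def resync_workspace_keys(client: AzureMachineLearningWorkspaces) -> None:
    poller = await client.workspaces.begin_resync_keys(
        "my-resource-group", "my-workspace"
    )
    await poller.wait()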
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.resync_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - resync_keys.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + begin_resync_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + @distributed_trace def list_by_subscription( self, - skiptoken: Optional[str] = None, - **kwargs - ) -> AsyncIterable["_models.WorkspaceListResult"]: + skip: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable[_models.WorkspaceListResult]: """Lists all the available machine learning workspaces under the specified subscription. - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + skip=skip, + template_url=self.list_by_subscription.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = 
_convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -658,17 +1005,412 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return AsyncItemPaged( get_next, extract_data ) - list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + @distributed_trace_async + async def list_notebook_access_token( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.NotebookAccessTokenResult: + """return notebook access token and refresh token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
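# --- Editorial usage sketch (not part of the generated diff) ---
# Fetching the notebook access token; the fields on NotebookAccessTokenResult
# are not spelled out in this diff, so the sketch only returns the model.
from azure.mgmt.machinelearningservices import models
from azure.mgmt.machinelearningservices.aio import AzureMachineLearningWorkspaces


async def get_notebook_token(
    client: AzureMachineLearningWorkspaces,
) -> models.NotebookAccessTokenResult:
    return await client.workspaces.list_notebook_access_token(
        "my-resource-group", "my-workspace"
    )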
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.NotebookAccessTokenResult] + + + request = build_list_notebook_access_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_notebook_access_token.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_access_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore + + + async def _prepare_notebook_initial( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> Optional[_models.NotebookResourceInfo]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.NotebookResourceInfo]] + + + request = build_prepare_notebook_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._prepare_notebook_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, 
error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _prepare_notebook_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + + @distributed_trace_async + async def begin_prepare_notebook( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> AsyncLROPoller[_models.NotebookResourceInfo]: + """Prepare a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.NotebookResourceInfo] + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._prepare_notebook_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: AsyncPollingMethod + elif polling is False: polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_prepare_notebook.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + @distributed_trace_async + async def list_storage_account_keys( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: + """List storage account keys of a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListStorageAccountKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListStorageAccountKeysResult] + + + request = build_list_storage_account_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_storage_account_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_storage_account_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"} # type: ignore + + + @distributed_trace_async + async def list_notebook_keys( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ListNotebookKeysResult: + """List keys of a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListNotebookKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListNotebookKeysResult] + + + request = build_list_notebook_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_notebook_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"} # type: ignore + + + @distributed_trace_async + async def list_outbound_network_dependencies_endpoints( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ExternalFQDNResponse: + """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExternalFQDNResponse, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExternalFQDNResponse] + + + request = build_list_outbound_network_dependencies_endpoints_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExternalFQDNResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_outbound_network_dependencies_endpoints.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py index 05c48b7f14c0f..d95d06913d27d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py @@ -6,340 +6,1076 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -try: - from ._models_py3 import AKS - from ._models_py3 import AKSProperties - from ._models_py3 import AksComputeSecrets - from ._models_py3 import AksNetworkingConfiguration - from ._models_py3 import AmlCompute - from ._models_py3 import AmlComputeNodeInformation - from ._models_py3 import AmlComputeNodesInformation - from ._models_py3 import AmlComputeProperties - from ._models_py3 import AmlUserFeature - from ._models_py3 import ClusterUpdateParameters - from ._models_py3 import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties - from ._models_py3 import Compute - from ._models_py3 import ComputeInstance - from ._models_py3 import ComputeInstanceApplication - from ._models_py3 import ComputeInstanceConnectivityEndpoints - from ._models_py3 import ComputeInstanceCreatedBy - from ._models_py3 import ComputeInstanceLastOperation - from ._models_py3 import ComputeInstanceProperties - from ._models_py3 import ComputeInstanceSshSettings - from ._models_py3 import ComputeNodesInformation - from ._models_py3 import ComputeResource - from ._models_py3 import ComputeSecrets - from ._models_py3 import DataFactory - from ._models_py3 import DataLakeAnalytics - from ._models_py3 import DataLakeAnalyticsProperties - from ._models_py3 import Databricks - from ._models_py3 import DatabricksComputeSecrets - from ._models_py3 import DatabricksProperties - from ._models_py3 import EncryptionProperty - from ._models_py3 import ErrorDetail - from ._models_py3 import ErrorResponse - from ._models_py3 import EstimatedVMPrice - from ._models_py3 import EstimatedVMPrices - from ._models_py3 import HDInsight - from ._models_py3 import HDInsightProperties - from ._models_py3 import Identity - from ._models_py3 import KeyVaultProperties - from ._models_py3 import ListAmlUserFeatureResult - from ._models_py3 import ListUsagesResult - from ._models_py3 import ListWorkspaceKeysResult - from ._models_py3 import ListWorkspaceQuotas - from ._models_py3 import MachineLearningServiceError - from ._models_py3 import NodeStateCounts - from ._models_py3 import NotebookListCredentialsResult - from ._models_py3 import NotebookPreparationError - from ._models_py3 import NotebookResourceInfo - from ._models_py3 import Operation - from ._models_py3 import OperationDisplay - from ._models_py3 import OperationListResult - from ._models_py3 import PaginatedComputeResourcesList - from ._models_py3 import PaginatedWorkspaceConnectionsList - from ._models_py3 import Password - from ._models_py3 import PrivateEndpoint - from ._models_py3 import PrivateEndpointConnection - from ._models_py3 import PrivateLinkResource - from ._models_py3 import PrivateLinkResourceListResult - from ._models_py3 import PrivateLinkServiceConnectionState - from ._models_py3 import QuotaBaseProperties - from ._models_py3 import QuotaUpdateParameters - from ._models_py3 import RegistryListCredentialsResult - from ._models_py3 import Resource - from ._models_py3 import ResourceId - from ._models_py3 import ResourceName - from ._models_py3 import ResourceQuota - from ._models_py3 import ResourceSkuLocationInfo - from ._models_py3 import ResourceSkuZoneDetails - from ._models_py3 import Restriction - from ._models_py3 import SKUCapability - from ._models_py3 import ScaleSettings - from ._models_py3 import ServicePrincipalCredentials - from ._models_py3 import SharedPrivateLinkResource - from ._models_py3 import Sku - from ._models_py3 import 
SkuListResult - from ._models_py3 import SkuSettings - from ._models_py3 import SslConfiguration - from ._models_py3 import SystemService - from ._models_py3 import UpdateWorkspaceQuotas - from ._models_py3 import UpdateWorkspaceQuotasResult - from ._models_py3 import Usage - from ._models_py3 import UsageName - from ._models_py3 import UserAccountCredentials - from ._models_py3 import VirtualMachine - from ._models_py3 import VirtualMachineProperties - from ._models_py3 import VirtualMachineSecrets - from ._models_py3 import VirtualMachineSize - from ._models_py3 import VirtualMachineSizeListResult - from ._models_py3 import VirtualMachineSshCredentials - from ._models_py3 import Workspace - from ._models_py3 import WorkspaceConnection - from ._models_py3 import WorkspaceConnectionDto - from ._models_py3 import WorkspaceListResult - from ._models_py3 import WorkspaceSku - from ._models_py3 import WorkspaceUpdateParameters -except (SyntaxError, ImportError): - from ._models import AKS # type: ignore - from ._models import AKSProperties # type: ignore - from ._models import AksComputeSecrets # type: ignore - from ._models import AksNetworkingConfiguration # type: ignore - from ._models import AmlCompute # type: ignore - from ._models import AmlComputeNodeInformation # type: ignore - from ._models import AmlComputeNodesInformation # type: ignore - from ._models import AmlComputeProperties # type: ignore - from ._models import AmlUserFeature # type: ignore - from ._models import ClusterUpdateParameters # type: ignore - from ._models import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties # type: ignore - from ._models import Compute # type: ignore - from ._models import ComputeInstance # type: ignore - from ._models import ComputeInstanceApplication # type: ignore - from ._models import ComputeInstanceConnectivityEndpoints # type: ignore - from ._models import ComputeInstanceCreatedBy # type: ignore - from ._models import ComputeInstanceLastOperation # type: ignore - from ._models import ComputeInstanceProperties # type: ignore - from ._models import ComputeInstanceSshSettings # type: ignore - from ._models import ComputeNodesInformation # type: ignore - from ._models import ComputeResource # type: ignore - from ._models import ComputeSecrets # type: ignore - from ._models import DataFactory # type: ignore - from ._models import DataLakeAnalytics # type: ignore - from ._models import DataLakeAnalyticsProperties # type: ignore - from ._models import Databricks # type: ignore - from ._models import DatabricksComputeSecrets # type: ignore - from ._models import DatabricksProperties # type: ignore - from ._models import EncryptionProperty # type: ignore - from ._models import ErrorDetail # type: ignore - from ._models import ErrorResponse # type: ignore - from ._models import EstimatedVMPrice # type: ignore - from ._models import EstimatedVMPrices # type: ignore - from ._models import HDInsight # type: ignore - from ._models import HDInsightProperties # type: ignore - from ._models import Identity # type: ignore - from ._models import KeyVaultProperties # type: ignore - from ._models import ListAmlUserFeatureResult # type: ignore - from ._models import ListUsagesResult # type: ignore - from ._models import ListWorkspaceKeysResult # type: ignore - from ._models import ListWorkspaceQuotas # type: ignore - from ._models import MachineLearningServiceError # type: ignore - from ._models import NodeStateCounts # type: ignore - from ._models import 
NotebookListCredentialsResult # type: ignore - from ._models import NotebookPreparationError # type: ignore - from ._models import NotebookResourceInfo # type: ignore - from ._models import Operation # type: ignore - from ._models import OperationDisplay # type: ignore - from ._models import OperationListResult # type: ignore - from ._models import PaginatedComputeResourcesList # type: ignore - from ._models import PaginatedWorkspaceConnectionsList # type: ignore - from ._models import Password # type: ignore - from ._models import PrivateEndpoint # type: ignore - from ._models import PrivateEndpointConnection # type: ignore - from ._models import PrivateLinkResource # type: ignore - from ._models import PrivateLinkResourceListResult # type: ignore - from ._models import PrivateLinkServiceConnectionState # type: ignore - from ._models import QuotaBaseProperties # type: ignore - from ._models import QuotaUpdateParameters # type: ignore - from ._models import RegistryListCredentialsResult # type: ignore - from ._models import Resource # type: ignore - from ._models import ResourceId # type: ignore - from ._models import ResourceName # type: ignore - from ._models import ResourceQuota # type: ignore - from ._models import ResourceSkuLocationInfo # type: ignore - from ._models import ResourceSkuZoneDetails # type: ignore - from ._models import Restriction # type: ignore - from ._models import SKUCapability # type: ignore - from ._models import ScaleSettings # type: ignore - from ._models import ServicePrincipalCredentials # type: ignore - from ._models import SharedPrivateLinkResource # type: ignore - from ._models import Sku # type: ignore - from ._models import SkuListResult # type: ignore - from ._models import SkuSettings # type: ignore - from ._models import SslConfiguration # type: ignore - from ._models import SystemService # type: ignore - from ._models import UpdateWorkspaceQuotas # type: ignore - from ._models import UpdateWorkspaceQuotasResult # type: ignore - from ._models import Usage # type: ignore - from ._models import UsageName # type: ignore - from ._models import UserAccountCredentials # type: ignore - from ._models import VirtualMachine # type: ignore - from ._models import VirtualMachineProperties # type: ignore - from ._models import VirtualMachineSecrets # type: ignore - from ._models import VirtualMachineSize # type: ignore - from ._models import VirtualMachineSizeListResult # type: ignore - from ._models import VirtualMachineSshCredentials # type: ignore - from ._models import Workspace # type: ignore - from ._models import WorkspaceConnection # type: ignore - from ._models import WorkspaceConnectionDto # type: ignore - from ._models import WorkspaceListResult # type: ignore - from ._models import WorkspaceSku # type: ignore - from ._models import WorkspaceUpdateParameters # type: ignore +from ._models_py3 import AKS +from ._models_py3 import AKSSchema +from ._models_py3 import AKSSchemaProperties +from ._models_py3 import AccountKeyDatastoreCredentials +from ._models_py3 import AccountKeyDatastoreSecrets +from ._models_py3 import AksComputeSecrets +from ._models_py3 import AksComputeSecretsProperties +from ._models_py3 import AksNetworkingConfiguration +from ._models_py3 import AmlCompute +from ._models_py3 import AmlComputeNodeInformation +from ._models_py3 import AmlComputeNodesInformation +from ._models_py3 import AmlComputeProperties +from ._models_py3 import AmlComputeSchema +from ._models_py3 import AmlOperation +from ._models_py3 import AmlOperationDisplay 
+from ._models_py3 import AmlOperationListResult +from ._models_py3 import AmlToken +from ._models_py3 import AmlUserFeature +from ._models_py3 import AssetBase +from ._models_py3 import AssetContainer +from ._models_py3 import AssetJobInput +from ._models_py3 import AssetJobOutput +from ._models_py3 import AssetReferenceBase +from ._models_py3 import AssignedUser +from ._models_py3 import AutoForecastHorizon +from ._models_py3 import AutoMLJob +from ._models_py3 import AutoMLVertical +from ._models_py3 import AutoNCrossValidations +from ._models_py3 import AutoPauseProperties +from ._models_py3 import AutoScaleProperties +from ._models_py3 import AutoSeasonality +from ._models_py3 import AutoTargetLags +from ._models_py3 import AutoTargetRollingWindowSize +from ._models_py3 import AzureBlobDatastore +from ._models_py3 import AzureDataLakeGen1Datastore +from ._models_py3 import AzureDataLakeGen2Datastore +from ._models_py3 import AzureDatastore +from ._models_py3 import AzureFileDatastore +from ._models_py3 import BanditPolicy +from ._models_py3 import BatchDeployment +from ._models_py3 import BatchDeploymentProperties +from ._models_py3 import BatchDeploymentTrackedResourceArmPaginatedResult +from ._models_py3 import BatchEndpoint +from ._models_py3 import BatchEndpointDefaults +from ._models_py3 import BatchEndpointProperties +from ._models_py3 import BatchEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import BatchRetrySettings +from ._models_py3 import BayesianSamplingAlgorithm +from ._models_py3 import BindOptions +from ._models_py3 import BuildContext +from ._models_py3 import CertificateDatastoreCredentials +from ._models_py3 import CertificateDatastoreSecrets +from ._models_py3 import Classification +from ._models_py3 import ClassificationTrainingSettings +from ._models_py3 import ClusterUpdateParameters +from ._models_py3 import CocoExportSummary +from ._models_py3 import CodeConfiguration +from ._models_py3 import CodeContainer +from ._models_py3 import CodeContainerProperties +from ._models_py3 import CodeContainerResourceArmPaginatedResult +from ._models_py3 import CodeVersion +from ._models_py3 import CodeVersionProperties +from ._models_py3 import CodeVersionResourceArmPaginatedResult +from ._models_py3 import ColumnTransformer +from ._models_py3 import CommandJob +from ._models_py3 import CommandJobLimits +from ._models_py3 import ComponentContainer +from ._models_py3 import ComponentContainerProperties +from ._models_py3 import ComponentContainerResourceArmPaginatedResult +from ._models_py3 import ComponentVersion +from ._models_py3 import ComponentVersionProperties +from ._models_py3 import ComponentVersionResourceArmPaginatedResult +from ._models_py3 import Compute +from ._models_py3 import ComputeInstance +from ._models_py3 import ComputeInstanceApplication +from ._models_py3 import ComputeInstanceConnectivityEndpoints +from ._models_py3 import ComputeInstanceContainer +from ._models_py3 import ComputeInstanceCreatedBy +from ._models_py3 import ComputeInstanceDataDisk +from ._models_py3 import ComputeInstanceDataMount +from ._models_py3 import ComputeInstanceEnvironmentInfo +from ._models_py3 import ComputeInstanceLastOperation +from ._models_py3 import ComputeInstanceProperties +from ._models_py3 import ComputeInstanceSchema +from ._models_py3 import ComputeInstanceSshSettings +from ._models_py3 import ComputeInstanceVersion +from ._models_py3 import ComputeResource +from ._models_py3 import ComputeResourceSchema +from ._models_py3 import ComputeSchedules 
+from ._models_py3 import ComputeSecrets +from ._models_py3 import ComputeStartStopSchedule +from ._models_py3 import ContainerResourceRequirements +from ._models_py3 import ContainerResourceSettings +from ._models_py3 import CosmosDbSettings +from ._models_py3 import CronTrigger +from ._models_py3 import CsvExportSummary +from ._models_py3 import CustomForecastHorizon +from ._models_py3 import CustomModelJobInput +from ._models_py3 import CustomModelJobOutput +from ._models_py3 import CustomNCrossValidations +from ._models_py3 import CustomSeasonality +from ._models_py3 import CustomService +from ._models_py3 import CustomTargetLags +from ._models_py3 import CustomTargetRollingWindowSize +from ._models_py3 import DataContainer +from ._models_py3 import DataContainerProperties +from ._models_py3 import DataContainerResourceArmPaginatedResult +from ._models_py3 import DataFactory +from ._models_py3 import DataLakeAnalytics +from ._models_py3 import DataLakeAnalyticsSchema +from ._models_py3 import DataLakeAnalyticsSchemaProperties +from ._models_py3 import DataPathAssetReference +from ._models_py3 import DataVersionBase +from ._models_py3 import DataVersionBaseProperties +from ._models_py3 import DataVersionBaseResourceArmPaginatedResult +from ._models_py3 import Databricks +from ._models_py3 import DatabricksComputeSecrets +from ._models_py3 import DatabricksComputeSecretsProperties +from ._models_py3 import DatabricksProperties +from ._models_py3 import DatabricksSchema +from ._models_py3 import DatasetExportSummary +from ._models_py3 import Datastore +from ._models_py3 import DatastoreCredentials +from ._models_py3 import DatastoreProperties +from ._models_py3 import DatastoreResourceArmPaginatedResult +from ._models_py3 import DatastoreSecrets +from ._models_py3 import DefaultScaleSettings +from ._models_py3 import DeploymentLogs +from ._models_py3 import DeploymentLogsRequest +from ._models_py3 import DeploymentResourceConfiguration +from ._models_py3 import DiagnoseRequestProperties +from ._models_py3 import DiagnoseResponseResult +from ._models_py3 import DiagnoseResponseResultValue +from ._models_py3 import DiagnoseResult +from ._models_py3 import DiagnoseWorkspaceParameters +from ._models_py3 import DistributionConfiguration +from ._models_py3 import Docker +from ._models_py3 import EarlyTerminationPolicy +from ._models_py3 import EncryptionKeyVaultProperties +from ._models_py3 import EncryptionKeyVaultUpdateProperties +from ._models_py3 import EncryptionProperty +from ._models_py3 import EncryptionUpdateProperties +from ._models_py3 import Endpoint +from ._models_py3 import EndpointAuthKeys +from ._models_py3 import EndpointAuthToken +from ._models_py3 import EndpointDeploymentPropertiesBase +from ._models_py3 import EndpointPropertiesBase +from ._models_py3 import EndpointScheduleAction +from ._models_py3 import EnvironmentContainer +from ._models_py3 import EnvironmentContainerProperties +from ._models_py3 import EnvironmentContainerResourceArmPaginatedResult +from ._models_py3 import EnvironmentVariable +from ._models_py3 import EnvironmentVersion +from ._models_py3 import EnvironmentVersionProperties +from ._models_py3 import EnvironmentVersionResourceArmPaginatedResult +from ._models_py3 import ErrorAdditionalInfo +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorResponse +from ._models_py3 import EstimatedVMPrice +from ._models_py3 import EstimatedVMPrices +from ._models_py3 import ExportSummary +from ._models_py3 import ExternalFQDNResponse +from 
._models_py3 import FQDNEndpoint +from ._models_py3 import FQDNEndpointDetail +from ._models_py3 import FQDNEndpoints +from ._models_py3 import FQDNEndpointsProperties +from ._models_py3 import FeaturizationSettings +from ._models_py3 import FlavorData +from ._models_py3 import ForecastHorizon +from ._models_py3 import Forecasting +from ._models_py3 import ForecastingSettings +from ._models_py3 import ForecastingTrainingSettings +from ._models_py3 import GridSamplingAlgorithm +from ._models_py3 import HDInsight +from ._models_py3 import HDInsightProperties +from ._models_py3 import HDInsightSchema +from ._models_py3 import HdfsDatastore +from ._models_py3 import IdAssetReference +from ._models_py3 import IdentityConfiguration +from ._models_py3 import IdentityForCmk +from ._models_py3 import IdleShutdownSetting +from ._models_py3 import Image +from ._models_py3 import ImageClassification +from ._models_py3 import ImageClassificationBase +from ._models_py3 import ImageClassificationMultilabel +from ._models_py3 import ImageInstanceSegmentation +from ._models_py3 import ImageLimitSettings +from ._models_py3 import ImageModelDistributionSettings +from ._models_py3 import ImageModelDistributionSettingsClassification +from ._models_py3 import ImageModelDistributionSettingsObjectDetection +from ._models_py3 import ImageModelSettings +from ._models_py3 import ImageModelSettingsClassification +from ._models_py3 import ImageModelSettingsObjectDetection +from ._models_py3 import ImageObjectDetection +from ._models_py3 import ImageObjectDetectionBase +from ._models_py3 import ImageSweepLimitSettings +from ._models_py3 import ImageSweepSettings +from ._models_py3 import ImageVertical +from ._models_py3 import InferenceContainerProperties +from ._models_py3 import InstanceTypeSchema +from ._models_py3 import InstanceTypeSchemaResources +from ._models_py3 import JobBase +from ._models_py3 import JobBaseProperties +from ._models_py3 import JobBaseResourceArmPaginatedResult +from ._models_py3 import JobInput +from ._models_py3 import JobLimits +from ._models_py3 import JobOutput +from ._models_py3 import JobResourceConfiguration +from ._models_py3 import JobScheduleAction +from ._models_py3 import JobService +from ._models_py3 import KerberosCredentials +from ._models_py3 import KerberosKeytabCredentials +from ._models_py3 import KerberosKeytabSecrets +from ._models_py3 import KerberosPasswordCredentials +from ._models_py3 import KerberosPasswordSecrets +from ._models_py3 import Kubernetes +from ._models_py3 import KubernetesOnlineDeployment +from ._models_py3 import KubernetesProperties +from ._models_py3 import KubernetesSchema +from ._models_py3 import LabelCategory +from ._models_py3 import LabelClass +from ._models_py3 import LabelingDataConfiguration +from ._models_py3 import LabelingJob +from ._models_py3 import LabelingJobImageProperties +from ._models_py3 import LabelingJobInstructions +from ._models_py3 import LabelingJobMediaProperties +from ._models_py3 import LabelingJobProperties +from ._models_py3 import LabelingJobResourceArmPaginatedResult +from ._models_py3 import LabelingJobTextProperties +from ._models_py3 import ListAmlUserFeatureResult +from ._models_py3 import ListNotebookKeysResult +from ._models_py3 import ListStorageAccountKeysResult +from ._models_py3 import ListUsagesResult +from ._models_py3 import ListWorkspaceKeysResult +from ._models_py3 import ListWorkspaceQuotas +from ._models_py3 import LiteralJobInput +from ._models_py3 import MLAssistConfiguration +from ._models_py3 
import MLAssistConfigurationDisabled +from ._models_py3 import MLAssistConfigurationEnabled +from ._models_py3 import MLFlowModelJobInput +from ._models_py3 import MLFlowModelJobOutput +from ._models_py3 import MLTableData +from ._models_py3 import MLTableJobInput +from ._models_py3 import MLTableJobOutput +from ._models_py3 import ManagedIdentity +from ._models_py3 import ManagedIdentityAuthTypeWorkspaceConnectionProperties +from ._models_py3 import ManagedOnlineDeployment +from ._models_py3 import ManagedServiceIdentity +from ._models_py3 import MedianStoppingPolicy +from ._models_py3 import ModelContainer +from ._models_py3 import ModelContainerProperties +from ._models_py3 import ModelContainerResourceArmPaginatedResult +from ._models_py3 import ModelVersion +from ._models_py3 import ModelVersionProperties +from ._models_py3 import ModelVersionResourceArmPaginatedResult +from ._models_py3 import Mpi +from ._models_py3 import NCrossValidations +from ._models_py3 import NlpVertical +from ._models_py3 import NlpVerticalFeaturizationSettings +from ._models_py3 import NlpVerticalLimitSettings +from ._models_py3 import NodeStateCounts +from ._models_py3 import NoneAuthTypeWorkspaceConnectionProperties +from ._models_py3 import NoneDatastoreCredentials +from ._models_py3 import NotebookAccessTokenResult +from ._models_py3 import NotebookPreparationError +from ._models_py3 import NotebookResourceInfo +from ._models_py3 import Objective +from ._models_py3 import OnlineDeployment +from ._models_py3 import OnlineDeploymentProperties +from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult +from ._models_py3 import OnlineEndpoint +from ._models_py3 import OnlineEndpointProperties +from ._models_py3 import OnlineEndpointTrackedResourceArmPaginatedResult +from ._models_py3 import OnlineRequestSettings +from ._models_py3 import OnlineScaleSettings +from ._models_py3 import OutputPathAssetReference +from ._models_py3 import PATAuthTypeWorkspaceConnectionProperties +from ._models_py3 import PaginatedComputeResourcesList +from ._models_py3 import PartialBatchDeployment +from ._models_py3 import PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties +from ._models_py3 import PartialManagedServiceIdentity +from ._models_py3 import PartialMinimalTrackedResource +from ._models_py3 import PartialMinimalTrackedResourceWithIdentity +from ._models_py3 import PartialMinimalTrackedResourceWithSku +from ._models_py3 import PartialSku +from ._models_py3 import Password +from ._models_py3 import PersonalComputeInstanceSettings +from ._models_py3 import PipelineJob +from ._models_py3 import PrivateEndpoint +from ._models_py3 import PrivateEndpointConnection +from ._models_py3 import PrivateEndpointConnectionListResult +from ._models_py3 import PrivateLinkResource +from ._models_py3 import PrivateLinkResourceListResult +from ._models_py3 import PrivateLinkServiceConnectionState +from ._models_py3 import ProbeSettings +from ._models_py3 import ProgressMetrics +from ._models_py3 import PyTorch +from ._models_py3 import QuotaBaseProperties +from ._models_py3 import QuotaUpdateParameters +from ._models_py3 import RandomSamplingAlgorithm +from ._models_py3 import RecurrenceSchedule +from ._models_py3 import RecurrenceTrigger +from ._models_py3 import RegenerateEndpointKeysRequest +from ._models_py3 import RegistryListCredentialsResult +from ._models_py3 import Regression +from ._models_py3 import RegressionTrainingSettings +from ._models_py3 import Resource +from ._models_py3 import 
ResourceBase +from ._models_py3 import ResourceConfiguration +from ._models_py3 import ResourceId +from ._models_py3 import ResourceName +from ._models_py3 import ResourceQuota +from ._models_py3 import Route +from ._models_py3 import SASAuthTypeWorkspaceConnectionProperties +from ._models_py3 import SamplingAlgorithm +from ._models_py3 import SasDatastoreCredentials +from ._models_py3 import SasDatastoreSecrets +from ._models_py3 import ScaleSettings +from ._models_py3 import ScaleSettingsInformation +from ._models_py3 import Schedule +from ._models_py3 import ScheduleActionBase +from ._models_py3 import ScheduleBase +from ._models_py3 import ScheduleProperties +from ._models_py3 import ScheduleResourceArmPaginatedResult +from ._models_py3 import ScriptReference +from ._models_py3 import ScriptsToExecute +from ._models_py3 import Seasonality +from ._models_py3 import ServiceManagedResourcesSettings +from ._models_py3 import ServicePrincipalDatastoreCredentials +from ._models_py3 import ServicePrincipalDatastoreSecrets +from ._models_py3 import SetupScripts +from ._models_py3 import SharedPrivateLinkResource +from ._models_py3 import Sku +from ._models_py3 import SkuCapacity +from ._models_py3 import SkuResource +from ._models_py3 import SkuResourceArmPaginatedResult +from ._models_py3 import SkuSetting +from ._models_py3 import SparkJob +from ._models_py3 import SparkJobEntry +from ._models_py3 import SparkJobPythonEntry +from ._models_py3 import SparkJobScalaEntry +from ._models_py3 import SparkResourceConfiguration +from ._models_py3 import SslConfiguration +from ._models_py3 import StackEnsembleSettings +from ._models_py3 import StatusMessage +from ._models_py3 import SweepJob +from ._models_py3 import SweepJobLimits +from ._models_py3 import SynapseSpark +from ._models_py3 import SynapseSparkProperties +from ._models_py3 import SystemData +from ._models_py3 import SystemService +from ._models_py3 import TableVertical +from ._models_py3 import TableVerticalFeaturizationSettings +from ._models_py3 import TableVerticalLimitSettings +from ._models_py3 import TargetLags +from ._models_py3 import TargetRollingWindowSize +from ._models_py3 import TargetUtilizationScaleSettings +from ._models_py3 import TensorFlow +from ._models_py3 import TextClassification +from ._models_py3 import TextClassificationMultilabel +from ._models_py3 import TextNer +from ._models_py3 import TmpfsOptions +from ._models_py3 import TrackedResource +from ._models_py3 import TrainingSettings +from ._models_py3 import TrialComponent +from ._models_py3 import TriggerBase +from ._models_py3 import TritonModelJobInput +from ._models_py3 import TritonModelJobOutput +from ._models_py3 import TruncationSelectionPolicy +from ._models_py3 import UpdateWorkspaceQuotas +from ._models_py3 import UpdateWorkspaceQuotasResult +from ._models_py3 import UriFileDataVersion +from ._models_py3 import UriFileJobInput +from ._models_py3 import UriFileJobOutput +from ._models_py3 import UriFolderDataVersion +from ._models_py3 import UriFolderJobInput +from ._models_py3 import UriFolderJobOutput +from ._models_py3 import Usage +from ._models_py3 import UsageName +from ._models_py3 import UserAccountCredentials +from ._models_py3 import UserAssignedIdentity +from ._models_py3 import UserIdentity +from ._models_py3 import UsernamePasswordAuthTypeWorkspaceConnectionProperties +from ._models_py3 import VirtualMachine +from ._models_py3 import VirtualMachineImage +from ._models_py3 import VirtualMachineSchema +from ._models_py3 import 
VirtualMachineSchemaProperties +from ._models_py3 import VirtualMachineSecrets +from ._models_py3 import VirtualMachineSecretsSchema +from ._models_py3 import VirtualMachineSize +from ._models_py3 import VirtualMachineSizeListResult +from ._models_py3 import VirtualMachineSshCredentials +from ._models_py3 import VolumeDefinition +from ._models_py3 import VolumeOptions +from ._models_py3 import Workspace +from ._models_py3 import WorkspaceConnectionManagedIdentity +from ._models_py3 import WorkspaceConnectionPersonalAccessToken +from ._models_py3 import WorkspaceConnectionPropertiesV2 +from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResource +from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult +from ._models_py3 import WorkspaceConnectionSharedAccessSignature +from ._models_py3 import WorkspaceConnectionUsernamePassword +from ._models_py3 import WorkspaceListResult +from ._models_py3 import WorkspaceUpdateParameters + from ._azure_machine_learning_workspaces_enums import ( AllocationState, ApplicationSharingPolicy, + Autosave, + BatchLoggingLevel, + BatchOutputAction, BillingCurrency, + BlockedTransformers, + Caching, + ClassificationModels, + ClassificationMultilabelPrimaryMetrics, + ClassificationPrimaryMetrics, + ClusterPurpose, + ComputeInstanceAuthorizationType, ComputeInstanceState, + ComputePowerAction, + ComputeProvisioningState, ComputeType, + ConnectionAuthType, + ConnectionCategory, + ContainerType, + CreatedByType, + CredentialsType, + DataType, + DatastoreType, + DeploymentProvisioningState, + DiagnoseResultLevel, + DistributionType, + EarlyTerminationPolicyType, + EgressPublicNetworkAccessType, EncryptionStatus, + EndpointAuthMode, + EndpointComputeType, + EndpointProvisioningState, + EnvironmentType, + EnvironmentVariableType, + ExportFormatType, + FeatureLags, + FeaturizationMode, + ForecastHorizonMode, + ForecastingModels, + ForecastingPrimaryMetrics, + Goal, + IdentityConfigurationType, + ImageAnnotationType, + ImageType, + InputDeliveryMode, + InstanceSegmentationPrimaryMetrics, + JobInputType, + JobLimitsType, + JobOutputType, + JobProvisioningState, + JobStatus, + JobType, + KeyType, + LearningRateScheduler, + ListViewType, + LoadBalancerType, + LogVerbosity, + MLAssistConfigurationType, + ManagedServiceIdentityType, + MediaType, + ModelSize, + MountAction, + MountState, + NCrossValidationsMode, + Network, NodeState, + ObjectDetectionPrimaryMetrics, + OperatingSystemType, OperationName, OperationStatus, + OperationTrigger, + OrderString, + OsType, + OutputDeliveryMode, PrivateEndpointConnectionProvisioningState, PrivateEndpointServiceConnectionStatus, - ProvisioningState, + Protocol, + ProvisioningStatus, + PublicNetworkAccess, + PublicNetworkAccessType, QuotaUnit, - ReasonCode, + RandomSamplingAlgorithmRule, + RecurrenceFrequency, + ReferenceType, + RegressionModels, + RegressionPrimaryMetrics, RemoteLoginPortPublicAccess, - ResourceIdentityType, + SamplingAlgorithmType, + ScaleType, + ScheduleActionType, + ScheduleProvisioningState, + ScheduleProvisioningStatus, + ScheduleStatus, + SeasonalityMode, + SecretsType, + ServiceDataAccessAuthIdentity, + ShortSeriesHandlingConfiguration, + SkuScaleType, + SkuTier, + SourceType, + SparkJobEntryType, SshPublicAccess, - SslConfigurationStatus, + SslConfigStatus, + StackMetaLearnerType, Status, + StatusMessageLevel, + StochasticOptimizer, + StorageAccountType, + TargetAggregationFunction, + TargetLagsMode, + TargetRollingWindowSizeMode, + TaskType, + TextAnnotationType, + 
TriggerType, UnderlyingResourceAction, UnitOfMeasure, UsageUnit, + UseStl, VMPriceOSType, VMTier, + ValidationMetricType, + ValueFormat, VmPriority, + VolumeDefinitionType, + WeekDay, + WorkspaceProvisioningState, ) - +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ 'AKS', - 'AKSProperties', + 'AKSSchema', + 'AKSSchemaProperties', + 'AccountKeyDatastoreCredentials', + 'AccountKeyDatastoreSecrets', 'AksComputeSecrets', + 'AksComputeSecretsProperties', 'AksNetworkingConfiguration', 'AmlCompute', 'AmlComputeNodeInformation', 'AmlComputeNodesInformation', 'AmlComputeProperties', + 'AmlComputeSchema', + 'AmlOperation', + 'AmlOperationDisplay', + 'AmlOperationListResult', + 'AmlToken', 'AmlUserFeature', + 'AssetBase', + 'AssetContainer', + 'AssetJobInput', + 'AssetJobOutput', + 'AssetReferenceBase', + 'AssignedUser', + 'AutoForecastHorizon', + 'AutoMLJob', + 'AutoMLVertical', + 'AutoNCrossValidations', + 'AutoPauseProperties', + 'AutoScaleProperties', + 'AutoSeasonality', + 'AutoTargetLags', + 'AutoTargetRollingWindowSize', + 'AzureBlobDatastore', + 'AzureDataLakeGen1Datastore', + 'AzureDataLakeGen2Datastore', + 'AzureDatastore', + 'AzureFileDatastore', + 'BanditPolicy', + 'BatchDeployment', + 'BatchDeploymentProperties', + 'BatchDeploymentTrackedResourceArmPaginatedResult', + 'BatchEndpoint', + 'BatchEndpointDefaults', + 'BatchEndpointProperties', + 'BatchEndpointTrackedResourceArmPaginatedResult', + 'BatchRetrySettings', + 'BayesianSamplingAlgorithm', + 'BindOptions', + 'BuildContext', + 'CertificateDatastoreCredentials', + 'CertificateDatastoreSecrets', + 'Classification', + 'ClassificationTrainingSettings', 'ClusterUpdateParameters', - 'ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties', + 'CocoExportSummary', + 'CodeConfiguration', + 'CodeContainer', + 'CodeContainerProperties', + 'CodeContainerResourceArmPaginatedResult', + 'CodeVersion', + 'CodeVersionProperties', + 'CodeVersionResourceArmPaginatedResult', + 'ColumnTransformer', + 'CommandJob', + 'CommandJobLimits', + 'ComponentContainer', + 'ComponentContainerProperties', + 'ComponentContainerResourceArmPaginatedResult', + 'ComponentVersion', + 'ComponentVersionProperties', + 'ComponentVersionResourceArmPaginatedResult', 'Compute', 'ComputeInstance', 'ComputeInstanceApplication', 'ComputeInstanceConnectivityEndpoints', + 'ComputeInstanceContainer', 'ComputeInstanceCreatedBy', + 'ComputeInstanceDataDisk', + 'ComputeInstanceDataMount', + 'ComputeInstanceEnvironmentInfo', 'ComputeInstanceLastOperation', 'ComputeInstanceProperties', + 'ComputeInstanceSchema', 'ComputeInstanceSshSettings', - 'ComputeNodesInformation', + 'ComputeInstanceVersion', 'ComputeResource', + 'ComputeResourceSchema', + 'ComputeSchedules', 'ComputeSecrets', + 'ComputeStartStopSchedule', + 'ContainerResourceRequirements', + 'ContainerResourceSettings', + 'CosmosDbSettings', + 'CronTrigger', + 'CsvExportSummary', + 'CustomForecastHorizon', + 'CustomModelJobInput', + 'CustomModelJobOutput', + 'CustomNCrossValidations', + 'CustomSeasonality', + 'CustomService', + 'CustomTargetLags', + 'CustomTargetRollingWindowSize', + 'DataContainer', + 'DataContainerProperties', + 'DataContainerResourceArmPaginatedResult', 'DataFactory', 'DataLakeAnalytics', - 'DataLakeAnalyticsProperties', + 'DataLakeAnalyticsSchema', + 'DataLakeAnalyticsSchemaProperties', + 'DataPathAssetReference', + 'DataVersionBase', + 
'DataVersionBaseProperties', + 'DataVersionBaseResourceArmPaginatedResult', 'Databricks', 'DatabricksComputeSecrets', + 'DatabricksComputeSecretsProperties', 'DatabricksProperties', + 'DatabricksSchema', + 'DatasetExportSummary', + 'Datastore', + 'DatastoreCredentials', + 'DatastoreProperties', + 'DatastoreResourceArmPaginatedResult', + 'DatastoreSecrets', + 'DefaultScaleSettings', + 'DeploymentLogs', + 'DeploymentLogsRequest', + 'DeploymentResourceConfiguration', + 'DiagnoseRequestProperties', + 'DiagnoseResponseResult', + 'DiagnoseResponseResultValue', + 'DiagnoseResult', + 'DiagnoseWorkspaceParameters', + 'DistributionConfiguration', + 'Docker', + 'EarlyTerminationPolicy', + 'EncryptionKeyVaultProperties', + 'EncryptionKeyVaultUpdateProperties', 'EncryptionProperty', + 'EncryptionUpdateProperties', + 'Endpoint', + 'EndpointAuthKeys', + 'EndpointAuthToken', + 'EndpointDeploymentPropertiesBase', + 'EndpointPropertiesBase', + 'EndpointScheduleAction', + 'EnvironmentContainer', + 'EnvironmentContainerProperties', + 'EnvironmentContainerResourceArmPaginatedResult', + 'EnvironmentVariable', + 'EnvironmentVersion', + 'EnvironmentVersionProperties', + 'EnvironmentVersionResourceArmPaginatedResult', + 'ErrorAdditionalInfo', 'ErrorDetail', 'ErrorResponse', 'EstimatedVMPrice', 'EstimatedVMPrices', + 'ExportSummary', + 'ExternalFQDNResponse', + 'FQDNEndpoint', + 'FQDNEndpointDetail', + 'FQDNEndpoints', + 'FQDNEndpointsProperties', + 'FeaturizationSettings', + 'FlavorData', + 'ForecastHorizon', + 'Forecasting', + 'ForecastingSettings', + 'ForecastingTrainingSettings', + 'GridSamplingAlgorithm', 'HDInsight', 'HDInsightProperties', - 'Identity', - 'KeyVaultProperties', + 'HDInsightSchema', + 'HdfsDatastore', + 'IdAssetReference', + 'IdentityConfiguration', + 'IdentityForCmk', + 'IdleShutdownSetting', + 'Image', + 'ImageClassification', + 'ImageClassificationBase', + 'ImageClassificationMultilabel', + 'ImageInstanceSegmentation', + 'ImageLimitSettings', + 'ImageModelDistributionSettings', + 'ImageModelDistributionSettingsClassification', + 'ImageModelDistributionSettingsObjectDetection', + 'ImageModelSettings', + 'ImageModelSettingsClassification', + 'ImageModelSettingsObjectDetection', + 'ImageObjectDetection', + 'ImageObjectDetectionBase', + 'ImageSweepLimitSettings', + 'ImageSweepSettings', + 'ImageVertical', + 'InferenceContainerProperties', + 'InstanceTypeSchema', + 'InstanceTypeSchemaResources', + 'JobBase', + 'JobBaseProperties', + 'JobBaseResourceArmPaginatedResult', + 'JobInput', + 'JobLimits', + 'JobOutput', + 'JobResourceConfiguration', + 'JobScheduleAction', + 'JobService', + 'KerberosCredentials', + 'KerberosKeytabCredentials', + 'KerberosKeytabSecrets', + 'KerberosPasswordCredentials', + 'KerberosPasswordSecrets', + 'Kubernetes', + 'KubernetesOnlineDeployment', + 'KubernetesProperties', + 'KubernetesSchema', + 'LabelCategory', + 'LabelClass', + 'LabelingDataConfiguration', + 'LabelingJob', + 'LabelingJobImageProperties', + 'LabelingJobInstructions', + 'LabelingJobMediaProperties', + 'LabelingJobProperties', + 'LabelingJobResourceArmPaginatedResult', + 'LabelingJobTextProperties', 'ListAmlUserFeatureResult', + 'ListNotebookKeysResult', + 'ListStorageAccountKeysResult', 'ListUsagesResult', 'ListWorkspaceKeysResult', 'ListWorkspaceQuotas', - 'MachineLearningServiceError', + 'LiteralJobInput', + 'MLAssistConfiguration', + 'MLAssistConfigurationDisabled', + 'MLAssistConfigurationEnabled', + 'MLFlowModelJobInput', + 'MLFlowModelJobOutput', + 'MLTableData', + 'MLTableJobInput', + 
'MLTableJobOutput', + 'ManagedIdentity', + 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', + 'ManagedOnlineDeployment', + 'ManagedServiceIdentity', + 'MedianStoppingPolicy', + 'ModelContainer', + 'ModelContainerProperties', + 'ModelContainerResourceArmPaginatedResult', + 'ModelVersion', + 'ModelVersionProperties', + 'ModelVersionResourceArmPaginatedResult', + 'Mpi', + 'NCrossValidations', + 'NlpVertical', + 'NlpVerticalFeaturizationSettings', + 'NlpVerticalLimitSettings', 'NodeStateCounts', - 'NotebookListCredentialsResult', + 'NoneAuthTypeWorkspaceConnectionProperties', + 'NoneDatastoreCredentials', + 'NotebookAccessTokenResult', 'NotebookPreparationError', 'NotebookResourceInfo', - 'Operation', - 'OperationDisplay', - 'OperationListResult', + 'Objective', + 'OnlineDeployment', + 'OnlineDeploymentProperties', + 'OnlineDeploymentTrackedResourceArmPaginatedResult', + 'OnlineEndpoint', + 'OnlineEndpointProperties', + 'OnlineEndpointTrackedResourceArmPaginatedResult', + 'OnlineRequestSettings', + 'OnlineScaleSettings', + 'OutputPathAssetReference', + 'PATAuthTypeWorkspaceConnectionProperties', 'PaginatedComputeResourcesList', - 'PaginatedWorkspaceConnectionsList', + 'PartialBatchDeployment', + 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties', + 'PartialManagedServiceIdentity', + 'PartialMinimalTrackedResource', + 'PartialMinimalTrackedResourceWithIdentity', + 'PartialMinimalTrackedResourceWithSku', + 'PartialSku', 'Password', + 'PersonalComputeInstanceSettings', + 'PipelineJob', 'PrivateEndpoint', 'PrivateEndpointConnection', + 'PrivateEndpointConnectionListResult', 'PrivateLinkResource', 'PrivateLinkResourceListResult', 'PrivateLinkServiceConnectionState', + 'ProbeSettings', + 'ProgressMetrics', + 'PyTorch', 'QuotaBaseProperties', 'QuotaUpdateParameters', + 'RandomSamplingAlgorithm', + 'RecurrenceSchedule', + 'RecurrenceTrigger', + 'RegenerateEndpointKeysRequest', 'RegistryListCredentialsResult', + 'Regression', + 'RegressionTrainingSettings', 'Resource', + 'ResourceBase', + 'ResourceConfiguration', 'ResourceId', 'ResourceName', 'ResourceQuota', - 'ResourceSkuLocationInfo', - 'ResourceSkuZoneDetails', - 'Restriction', - 'SKUCapability', + 'Route', + 'SASAuthTypeWorkspaceConnectionProperties', + 'SamplingAlgorithm', + 'SasDatastoreCredentials', + 'SasDatastoreSecrets', 'ScaleSettings', - 'ServicePrincipalCredentials', + 'ScaleSettingsInformation', + 'Schedule', + 'ScheduleActionBase', + 'ScheduleBase', + 'ScheduleProperties', + 'ScheduleResourceArmPaginatedResult', + 'ScriptReference', + 'ScriptsToExecute', + 'Seasonality', + 'ServiceManagedResourcesSettings', + 'ServicePrincipalDatastoreCredentials', + 'ServicePrincipalDatastoreSecrets', + 'SetupScripts', 'SharedPrivateLinkResource', 'Sku', - 'SkuListResult', - 'SkuSettings', + 'SkuCapacity', + 'SkuResource', + 'SkuResourceArmPaginatedResult', + 'SkuSetting', + 'SparkJob', + 'SparkJobEntry', + 'SparkJobPythonEntry', + 'SparkJobScalaEntry', + 'SparkResourceConfiguration', 'SslConfiguration', + 'StackEnsembleSettings', + 'StatusMessage', + 'SweepJob', + 'SweepJobLimits', + 'SynapseSpark', + 'SynapseSparkProperties', + 'SystemData', 'SystemService', + 'TableVertical', + 'TableVerticalFeaturizationSettings', + 'TableVerticalLimitSettings', + 'TargetLags', + 'TargetRollingWindowSize', + 'TargetUtilizationScaleSettings', + 'TensorFlow', + 'TextClassification', + 'TextClassificationMultilabel', + 'TextNer', + 'TmpfsOptions', + 'TrackedResource', + 'TrainingSettings', + 'TrialComponent', + 'TriggerBase', + 
'TritonModelJobInput', + 'TritonModelJobOutput', + 'TruncationSelectionPolicy', 'UpdateWorkspaceQuotas', 'UpdateWorkspaceQuotasResult', + 'UriFileDataVersion', + 'UriFileJobInput', + 'UriFileJobOutput', + 'UriFolderDataVersion', + 'UriFolderJobInput', + 'UriFolderJobOutput', 'Usage', 'UsageName', 'UserAccountCredentials', + 'UserAssignedIdentity', + 'UserIdentity', + 'UsernamePasswordAuthTypeWorkspaceConnectionProperties', 'VirtualMachine', - 'VirtualMachineProperties', + 'VirtualMachineImage', + 'VirtualMachineSchema', + 'VirtualMachineSchemaProperties', 'VirtualMachineSecrets', + 'VirtualMachineSecretsSchema', 'VirtualMachineSize', 'VirtualMachineSizeListResult', 'VirtualMachineSshCredentials', + 'VolumeDefinition', + 'VolumeOptions', 'Workspace', - 'WorkspaceConnection', - 'WorkspaceConnectionDto', + 'WorkspaceConnectionManagedIdentity', + 'WorkspaceConnectionPersonalAccessToken', + 'WorkspaceConnectionPropertiesV2', + 'WorkspaceConnectionPropertiesV2BasicResource', + 'WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult', + 'WorkspaceConnectionSharedAccessSignature', + 'WorkspaceConnectionUsernamePassword', 'WorkspaceListResult', - 'WorkspaceSku', 'WorkspaceUpdateParameters', 'AllocationState', 'ApplicationSharingPolicy', + 'Autosave', + 'BatchLoggingLevel', + 'BatchOutputAction', 'BillingCurrency', + 'BlockedTransformers', + 'Caching', + 'ClassificationModels', + 'ClassificationMultilabelPrimaryMetrics', + 'ClassificationPrimaryMetrics', + 'ClusterPurpose', + 'ComputeInstanceAuthorizationType', 'ComputeInstanceState', + 'ComputePowerAction', + 'ComputeProvisioningState', 'ComputeType', + 'ConnectionAuthType', + 'ConnectionCategory', + 'ContainerType', + 'CreatedByType', + 'CredentialsType', + 'DataType', + 'DatastoreType', + 'DeploymentProvisioningState', + 'DiagnoseResultLevel', + 'DistributionType', + 'EarlyTerminationPolicyType', + 'EgressPublicNetworkAccessType', 'EncryptionStatus', + 'EndpointAuthMode', + 'EndpointComputeType', + 'EndpointProvisioningState', + 'EnvironmentType', + 'EnvironmentVariableType', + 'ExportFormatType', + 'FeatureLags', + 'FeaturizationMode', + 'ForecastHorizonMode', + 'ForecastingModels', + 'ForecastingPrimaryMetrics', + 'Goal', + 'IdentityConfigurationType', + 'ImageAnnotationType', + 'ImageType', + 'InputDeliveryMode', + 'InstanceSegmentationPrimaryMetrics', + 'JobInputType', + 'JobLimitsType', + 'JobOutputType', + 'JobProvisioningState', + 'JobStatus', + 'JobType', + 'KeyType', + 'LearningRateScheduler', + 'ListViewType', + 'LoadBalancerType', + 'LogVerbosity', + 'MLAssistConfigurationType', + 'ManagedServiceIdentityType', + 'MediaType', + 'ModelSize', + 'MountAction', + 'MountState', + 'NCrossValidationsMode', + 'Network', 'NodeState', + 'ObjectDetectionPrimaryMetrics', + 'OperatingSystemType', 'OperationName', 'OperationStatus', + 'OperationTrigger', + 'OrderString', + 'OsType', + 'OutputDeliveryMode', 'PrivateEndpointConnectionProvisioningState', 'PrivateEndpointServiceConnectionStatus', - 'ProvisioningState', + 'Protocol', + 'ProvisioningStatus', + 'PublicNetworkAccess', + 'PublicNetworkAccessType', 'QuotaUnit', - 'ReasonCode', + 'RandomSamplingAlgorithmRule', + 'RecurrenceFrequency', + 'ReferenceType', + 'RegressionModels', + 'RegressionPrimaryMetrics', 'RemoteLoginPortPublicAccess', - 'ResourceIdentityType', + 'SamplingAlgorithmType', + 'ScaleType', + 'ScheduleActionType', + 'ScheduleProvisioningState', + 'ScheduleProvisioningStatus', + 'ScheduleStatus', + 'SeasonalityMode', + 'SecretsType', + 'ServiceDataAccessAuthIdentity', + 
'ShortSeriesHandlingConfiguration', + 'SkuScaleType', + 'SkuTier', + 'SourceType', + 'SparkJobEntryType', 'SshPublicAccess', - 'SslConfigurationStatus', + 'SslConfigStatus', + 'StackMetaLearnerType', 'Status', + 'StatusMessageLevel', + 'StochasticOptimizer', + 'StorageAccountType', + 'TargetAggregationFunction', + 'TargetLagsMode', + 'TargetRollingWindowSizeMode', + 'TaskType', + 'TextAnnotationType', + 'TriggerType', 'UnderlyingResourceAction', 'UnitOfMeasure', 'UsageUnit', + 'UseStl', 'VMPriceOSType', 'VMTier', + 'ValidationMetricType', + 'ValueFormat', 'VmPriority', + 'VolumeDefinitionType', + 'WeekDay', + 'WorkspaceProvisioningState', ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py index efe5686a0c99d..9a077c0ebfd2b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py @@ -6,27 +6,11 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from enum import Enum, EnumMeta -from six import with_metaclass - -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class AllocationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Allocation state of the compute. Possible values are: steady - Indicates that the compute is not resizing. There are no changes to the number of compute nodes in the compute in progress. A compute enters this state when it is created and when no operations are being performed on the @@ -37,7 +21,7 @@ class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STEADY = "Steady" RESIZING = "Resizing" -class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ApplicationSharingPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Policy for sharing applications on this compute instance among users of parent workspace. If Personal, only the creator can access applications on this compute instance. When Shared, any workspace user can access applications on this instance depending on his/her assigned role. 
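For context, a minimal usage sketch (not taken from the generated sources) of what the switch to azure.core's CaseInsensitiveEnumMeta preserves for callers: member lookup by name stays case-insensitive, matching the removed _CaseInsensitiveEnumMeta shown above, and because the enums also derive from str they still compare equal to their service values. AllocationState and its members come from this diff.

# Illustrative sketch only; behavior inferred from the removed metaclass above and azure.core.
from azure.mgmt.machinelearningservices.models import AllocationState

assert AllocationState["steady"] is AllocationState.STEADY      # name lookup upper-cases the key
assert AllocationState("Resizing") is AllocationState.RESIZING  # lookup by value still needs the exact casing
assert AllocationState.STEADY == "Steady"                       # str-derived member equals its wire value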
@@ -46,14 +30,196 @@ class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu PERSONAL = "Personal" SHARED = "Shared" -class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class Autosave(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Auto save settings. + """ + + NONE = "None" + LOCAL = "Local" + REMOTE = "Remote" + +class BatchLoggingLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Log verbosity for batch inferencing. + Increasing verbosity order for logging is : Warning, Info and Debug. + The default value is Info. + """ + + INFO = "Info" + WARNING = "Warning" + DEBUG = "Debug" + +class BatchOutputAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine how batch inferencing will handle output + """ + + SUMMARY_ONLY = "SummaryOnly" + APPEND_ROW = "AppendRow" + +class BillingCurrency(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Three lettered code specifying the currency of the VM price. Example: USD """ USD = "USD" -class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Current state of a ComputeInstance. +class BlockedTransformers(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all classification models supported by AutoML. + """ + + #: Target encoding for text data. + TEXT_TARGET_ENCODER = "TextTargetEncoder" + #: Ohe hot encoding creates a binary feature transformation. + ONE_HOT_ENCODER = "OneHotEncoder" + #: Target encoding for categorical data. + CAT_TARGET_ENCODER = "CatTargetEncoder" + #: Tf-Idf stands for, term-frequency times inverse document-frequency. This is a common term + #: weighting scheme for identifying information from documents. + TF_IDF = "TfIdf" + #: Weight of Evidence encoding is a technique used to encode categorical variables. It uses the + #: natural log of the P(1)/P(0) to create weights. + WO_E_TARGET_ENCODER = "WoETargetEncoder" + #: Label encoder converts labels/categorical variables in a numerical form. + LABEL_ENCODER = "LabelEncoder" + #: Word embedding helps represents words or phrases as a vector, or a series of numbers. + WORD_EMBEDDING = "WordEmbedding" + #: Naive Bayes is a classified that is used for classification of discrete features that are + #: categorically distributed. + NAIVE_BAYES = "NaiveBayes" + #: Count Vectorizer converts a collection of text documents to a matrix of token counts. + COUNT_VECTORIZER = "CountVectorizer" + #: Hashing One Hot Encoder can turn categorical variables into a limited number of new features. + #: This is often used for high-cardinality categorical features. + HASH_ONE_HOT_ENCODER = "HashOneHotEncoder" + +class Caching(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Caching type of Data Disk. + """ + + NONE = "None" + READ_ONLY = "ReadOnly" + READ_WRITE = "ReadWrite" + +class ClassificationModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all classification models supported by AutoML. + """ + + #: Logistic regression is a fundamental classification technique. + #: It belongs to the group of linear classifiers and is somewhat similar to polynomial and linear + #: regression. + #: Logistic regression is fast and relatively uncomplicated, and it's convenient for you to + #: interpret the results. + #: Although it's essentially a method for binary classification, it can also be applied to + #: multiclass problems. 
+ LOGISTIC_REGRESSION = "LogisticRegression" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + SGD = "SGD" + #: The multinomial Naive Bayes classifier is suitable for classification with discrete features + #: (e.g., word counts for text classification). + #: The multinomial distribution normally requires integer feature counts. However, in practice, + #: fractional counts such as tf-idf may also work. + MULTINOMIAL_NAIVE_BAYES = "MultinomialNaiveBayes" + #: Naive Bayes classifier for multivariate Bernoulli models. + BERNOULLI_NAIVE_BAYES = "BernoulliNaiveBayes" + #: A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text. + SVM = "SVM" + #: A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text. + #: Linear SVM performs best when input data is linear, i.e., data can be easily classified by + #: drawing the straight line between classified values on a plotted graph. + LINEAR_SVM = "LinearSVM" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: Random forest is a supervised learning algorithm. + #: The "forest"\\ it builds, is an ensemble of decision trees, usually trained with the + #: “bagging”\\ method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. + LIGHT_GBM = "LightGBM" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: XGBoost: Extreme Gradient Boosting Algorithm. This algorithm is used for structured data where + #: target column values can be divided into distinct class values. + XG_BOOST_CLASSIFIER = "XGBoostClassifier" + +class ClassificationMultilabelPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification multilabel tasks. + """ + + #: AUC is the Area under the curve. + #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class. 
+ AUC_WEIGHTED = "AUCWeighted" + #: Accuracy is the ratio of predictions that exactly match the true class labels. + ACCURACY = "Accuracy" + #: Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1. + NORM_MACRO_RECALL = "NormMacroRecall" + #: The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class. + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + #: The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class. + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + #: Intersection Over Union. Intersection of predictions divided by union of predictions. + IOU = "IOU" + +class ClassificationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification tasks. + """ + + #: AUC is the Area under the curve. + #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class. + AUC_WEIGHTED = "AUCWeighted" + #: Accuracy is the ratio of predictions that exactly match the true class labels. + ACCURACY = "Accuracy" + #: Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1. + NORM_MACRO_RECALL = "NormMacroRecall" + #: The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class. + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + #: The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class. + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + +class ClusterPurpose(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Intended usage of the cluster + """ + + FAST_PROD = "FastProd" + DENSE_PROD = "DenseProd" + DEV_TEST = "DevTest" + +class ComputeInstanceAuthorizationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Compute Instance Authorization type. Available values are personal (default). + """ + + PERSONAL = "personal" + +class ComputeInstanceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Current state of an ComputeInstance. """ CREATING = "Creating" @@ -72,11 +238,32 @@ class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): UNKNOWN = "Unknown" UNUSABLE = "Unusable" -class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ComputePowerAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The compute power action. + """ + + START = "Start" + STOP = "Stop" + +class ComputeProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provision state of the cluster. Valid values are Unknown, Updating, Provisioning, + Succeeded, and Failed. 
+ """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + +class ComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of compute """ AKS = "AKS" + KUBERNETES = "Kubernetes" AML_COMPUTE = "AmlCompute" COMPUTE_INSTANCE = "ComputeInstance" DATA_FACTORY = "DataFactory" @@ -84,15 +271,548 @@ class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): HD_INSIGHT = "HDInsight" DATABRICKS = "Databricks" DATA_LAKE_ANALYTICS = "DataLakeAnalytics" + SYNAPSE_SPARK = "SynapseSpark" + +class ConnectionAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Authentication type of the connection target + """ + + PAT = "PAT" + MANAGED_IDENTITY = "ManagedIdentity" + USERNAME_PASSWORD = "UsernamePassword" + NONE = "None" + SAS = "SAS" + +class ConnectionCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of the connection + """ + + PYTHON_FEED = "PythonFeed" + CONTAINER_REGISTRY = "ContainerRegistry" + GIT = "Git" + +class ContainerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + STORAGE_INITIALIZER = "StorageInitializer" + INFERENCE_SERVER = "InferenceServer" + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class CredentialsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore credentials type. + """ + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + NONE = "None" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_KEYTAB = "KerberosKeytab" + KERBEROS_PASSWORD = "KerberosPassword" + +class DatastoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore contents type. + """ + + AZURE_BLOB = "AzureBlob" + AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1" + AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" + AZURE_FILE = "AzureFile" + HDFS = "Hdfs" + +class DataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data. + """ + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + +class DeploymentProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Possible values for DeploymentProvisioningState. + """ + + CREATING = "Creating" + DELETING = "Deleting" + SCALING = "Scaling" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + +class DiagnoseResultLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Level of workspace setup error + """ + + WARNING = "Warning" + ERROR = "Error" + INFORMATION = "Information" + +class DistributionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job distribution type. + """ + + PY_TORCH = "PyTorch" + TENSOR_FLOW = "TensorFlow" + MPI = "Mpi" + +class EarlyTerminationPolicyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + BANDIT = "Bandit" + MEDIAN_STOPPING = "MedianStopping" + TRUNCATION_SELECTION = "TruncationSelection" + +class EgressPublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled for egress of a + deployment. 
+ """ -class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + ENABLED = "Enabled" + DISABLED = "Disabled" + +class EncryptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Indicates whether or not the encryption is enabled for the workspace. """ ENABLED = "Enabled" DISABLED = "Disabled" -class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class EndpointAuthMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint authentication mode. + """ + + AML_TOKEN = "AMLToken" + KEY = "Key" + AAD_TOKEN = "AADToken" + +class EndpointComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint compute type. + """ + + MANAGED = "Managed" + KUBERNETES = "Kubernetes" + AZURE_ML_COMPUTE = "AzureMLCompute" + +class EndpointProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of endpoint provisioning. + """ + + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + UPDATING = "Updating" + CANCELED = "Canceled" + +class EnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Environment type is either user created or curated by Azure ML service + """ + + CURATED = "Curated" + USER_CREATED = "UserCreated" + +class EnvironmentVariableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the Environment Variable. Possible values are: local - For local variable + """ + + LOCAL = "local" + +class ExportFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The format of exported labels. + """ + + DATASET = "Dataset" + COCO = "Coco" + CSV = "CSV" + +class FeatureLags(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Flag for generating lags for the numeric features. + """ + + #: No feature lags generated. + NONE = "None" + #: System auto-generates feature lags. + AUTO = "Auto" + +class FeaturizationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Featurization mode - determines data featurization mode. + """ + + #: Auto mode, system performs featurization without any custom featurization inputs. + AUTO = "Auto" + #: Custom featurization. + CUSTOM = "Custom" + #: Featurization off. 'Forecasting' task cannot use this value. + OFF = "Off" + +class ForecastHorizonMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine forecast horizon selection mode. + """ + + #: Forecast horizon to be determined automatically. + AUTO = "Auto" + #: Use the custom forecast horizon. + CUSTOM = "Custom" + +class ForecastingModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all forecasting models supported by AutoML. + """ + + #: Auto-Autoregressive Integrated Moving Average (ARIMA) model uses time-series data and + #: statistical analysis to interpret the data and make future predictions. + #: This model aims to explain data by using time series data on its past values and uses linear + #: regression to make predictions. + AUTO_ARIMA = "AutoArima" + #: Prophet is a procedure for forecasting time series data based on an additive model where + #: non-linear trends are fit with yearly, weekly, and daily seasonality, plus holiday effects. + #: It works best with time series that have strong seasonal effects and several seasons of + #: historical data. Prophet is robust to missing data and shifts in the trend, and typically + #: handles outliers well. 
+ PROPHET = "Prophet" + #: The Naive forecasting model makes predictions by carrying forward the latest target value for + #: each time-series in the training data. + NAIVE = "Naive" + #: The Seasonal Naive forecasting model makes predictions by carrying forward the latest season of + #: target values for each time-series in the training data. + SEASONAL_NAIVE = "SeasonalNaive" + #: The Average forecasting model makes predictions by carrying forward the average of the target + #: values for each time-series in the training data. + AVERAGE = "Average" + #: The Seasonal Average forecasting model makes predictions by carrying forward the average value + #: of the latest season of data for each time-series in the training data. + SEASONAL_AVERAGE = "SeasonalAverage" + #: Exponential smoothing is a time series forecasting method for univariate data that can be + #: extended to support data with a systematic trend or seasonal component. + EXPONENTIAL_SMOOTHING = "ExponentialSmoothing" + #: An Autoregressive Integrated Moving Average with Explanatory Variable (ARIMAX) model can be + #: viewed as a multiple regression model with one or more autoregressive (AR) terms and/or one or + #: more moving average (MA) terms. + #: This method is suitable for forecasting when data is stationary/non stationary, and + #: multivariate with any type of data pattern, i.e., level/trend /seasonality/cyclicity. + ARIMAX = "Arimax" + #: TCNForecaster: Temporal Convolutional Networks Forecaster. //TODO: Ask forecasting team for + #: brief intro. + TCN_FORECASTER = "TCNForecaster" + #: Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions. + ELASTIC_NET = "ElasticNet" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer. + LASSO_LARS = "LassoLars" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique. + SGD = "SGD" + #: Random forest is a supervised learning algorithm. + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the “bagging” + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. 
+ EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. + LIGHT_GBM = "LightGBM" + #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners. + XG_BOOST_REGRESSOR = "XGBoostRegressor" + +class ForecastingPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Forecasting task. + """ + + #: The Spearman's rank coefficient of correlation is a non-parametric measure of rank correlation. + SPEARMAN_CORRELATION = "SpearmanCorrelation" + #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales. + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + #: The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models. + R2_SCORE = "R2Score" + #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales. + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + +class Goal(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines supported metric goals for hyperparameter tuning + """ + + MINIMIZE = "Minimize" + MAXIMIZE = "Maximize" + +class IdentityConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine identity framework. + """ + + MANAGED = "Managed" + AML_TOKEN = "AMLToken" + USER_IDENTITY = "UserIdentity" + +class ImageAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of image data. + """ + + CLASSIFICATION = "Classification" + BOUNDING_BOX = "BoundingBox" + INSTANCE_SEGMENTATION = "InstanceSegmentation" + +class ImageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the image. Possible values are: docker - For docker images. azureml - For AzureML + images + """ + + DOCKER = "docker" + AZUREML = "azureml" + +class InputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the input data delivery mode. + """ + + READ_ONLY_MOUNT = "ReadOnlyMount" + READ_WRITE_MOUNT = "ReadWriteMount" + DOWNLOAD = "Download" + DIRECT = "Direct" + EVAL_MOUNT = "EvalMount" + EVAL_DOWNLOAD = "EvalDownload" + +class InstanceSegmentationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for InstanceSegmentation tasks. + """ + + #: Mean Average Precision (MAP) is the average of AP (Average Precision). + #: AP is calculated for each class and averaged to get the MAP. + MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + +class JobInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Input Type. + """ + + LITERAL = "literal" + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + +class JobLimitsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + COMMAND = "Command" + SWEEP = "Sweep" + +class JobOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Output Type. + """ + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + +class JobProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job provisioning state. 
+ """ + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + IN_PROGRESS = "InProgress" + +class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of a job. + """ + + #: Run hasn't started yet. + NOT_STARTED = "NotStarted" + #: Run has started. The user has a run ID. + STARTING = "Starting" + #: (Not used currently) It will be used if ES is creating the compute target. + PROVISIONING = "Provisioning" + #: The run environment is being prepared. + PREPARING = "Preparing" + #: The job is queued in the compute target. For example, in BatchAI the job is in queued state, + #: while waiting for all required nodes to be ready. + QUEUED = "Queued" + #: The job started to run in the compute target. + RUNNING = "Running" + #: Job is completed in the target. It is in output collection state now. + FINALIZING = "Finalizing" + #: Cancellation has been requested for the job. + CANCEL_REQUESTED = "CancelRequested" + #: Job completed successfully. This reflects that both the job itself and output collection states + #: completed successfully. + COMPLETED = "Completed" + #: Job failed. + FAILED = "Failed" + #: Following cancellation request, the job is now successfully canceled. + CANCELED = "Canceled" + #: When heartbeat is enabled, if the run isn't updating any information to RunHistory then the run + #: goes to NotResponding state. + #: NotResponding is the only state that is exempt from strict transition orders. A run can go from + #: NotResponding to any of the previous states. + NOT_RESPONDING = "NotResponding" + #: The job is paused by users. Some adjustment to labeling jobs can be made only in paused state. + PAUSED = "Paused" + #: Default job status if not mapped to all other statuses. + UNKNOWN = "Unknown" + #: The job is in a scheduled state. Job is not in any active state. + SCHEDULED = "Scheduled" + +class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of job. + """ + + AUTO_ML = "AutoML" + COMMAND = "Command" + LABELING = "Labeling" + SWEEP = "Sweep" + PIPELINE = "Pipeline" + SPARK = "Spark" + +class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + PRIMARY = "Primary" + SECONDARY = "Secondary" + +class LearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Learning rate scheduler enum. + """ + + #: No learning rate scheduler selected. + NONE = "None" + #: Cosine Annealing With Warmup. + WARMUP_COSINE = "WarmupCosine" + #: Step learning rate scheduler. + STEP = "Step" + +class ListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + ACTIVE_ONLY = "ActiveOnly" + ARCHIVED_ONLY = "ArchivedOnly" + ALL = "All" + +class LoadBalancerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Load Balancer Type + """ + + PUBLIC_IP = "PublicIp" + INTERNAL_LOAD_BALANCER = "InternalLoadBalancer" + +class LogVerbosity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for setting log verbosity. + """ + + #: No logs emitted. + NOT_SET = "NotSet" + #: Debug and above log statements logged. + DEBUG = "Debug" + #: Info and above log statements logged. + INFO = "Info" + #: Warning and above log statements logged. + WARNING = "Warning" + #: Error and above log statements logged. + ERROR = "Error" + #: Only critical statements logged. + CRITICAL = "Critical" + +class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of managed service identity (where both SystemAssigned and UserAssigned types are + allowed). 
+ """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + +class MediaType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Media type of data asset. + """ + + IMAGE = "Image" + TEXT = "Text" + +class MLAssistConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class ModelSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Image model size. + """ + + #: No value selected. + NONE = "None" + #: Small size. + SMALL = "Small" + #: Medium size. + MEDIUM = "Medium" + #: Large size. + LARGE = "Large" + #: Extra large size. + EXTRA_LARGE = "ExtraLarge" + +class MountAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount Action. + """ + + MOUNT = "Mount" + UNMOUNT = "Unmount" + +class MountState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount state. + """ + + MOUNT_REQUESTED = "MountRequested" + MOUNTED = "Mounted" + MOUNT_FAILED = "MountFailed" + UNMOUNT_REQUESTED = "UnmountRequested" + UNMOUNT_FAILED = "UnmountFailed" + UNMOUNTED = "Unmounted" + +class NCrossValidationsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Determines how N-Cross validations value is determined. + """ + + #: Determine N-Cross validations value automatically. Supported only for 'Forecasting' AutoML + #: task. + AUTO = "Auto" + #: Use custom N-Cross validations value. + CUSTOM = "Custom" + +class Network(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """network of this container. + """ + + BRIDGE = "Bridge" + HOST = "Host" + +class NodeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """State of the compute node. Values are idle, running, preparing, unusable, leaving and preempted. """ @@ -104,7 +824,22 @@ class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): LEAVING = "leaving" PREEMPTED = "preempted" -class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ObjectDetectionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Image ObjectDetection task. + """ + + #: Mean Average Precision (MAP) is the average of AP (Average Precision). + #: AP is calculated for each class and averaged to get the MAP. + MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + +class OperatingSystemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of operating system. + """ + + LINUX = "Linux" + WINDOWS = "Windows" + +class OperationName(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Name of the last operation. """ @@ -115,7 +850,7 @@ class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): REIMAGE = "Reimage" DELETE = "Delete" -class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Operation status. """ @@ -128,7 +863,37 @@ class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): REIMAGE_FAILED = "ReimageFailed" DELETE_FAILED = "DeleteFailed" -class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class OperationTrigger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Trigger of operation. 
+ """ + + USER = "User" + SCHEDULE = "Schedule" + IDLE_SHUTDOWN = "IdleShutdown" + +class OrderString(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + CREATED_AT_DESC = "CreatedAtDesc" + CREATED_AT_ASC = "CreatedAtAsc" + UPDATED_AT_DESC = "UpdatedAtDesc" + UPDATED_AT_ASC = "UpdatedAtAsc" + +class OsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Compute OS Type + """ + + LINUX = "Linux" + WINDOWS = "Windows" + +class OutputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Output data delivery mode enums. + """ + + READ_WRITE_MOUNT = "ReadWriteMount" + UPLOAD = "Upload" + DIRECT = "Direct" + +class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The current provisioning state. """ @@ -137,7 +902,7 @@ class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitive DELETING = "Deleting" FAILED = "Failed" -class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The private endpoint connection status. """ @@ -147,34 +912,133 @@ class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnum DISCONNECTED = "Disconnected" TIMEOUT = "Timeout" -class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The current deployment state of workspace resource. The provisioningState is to indicate states - for resource provisioning. +class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protocol over which communication will happen over this endpoint """ - UNKNOWN = "Unknown" - UPDATING = "Updating" - CREATING = "Creating" - DELETING = "Deleting" - SUCCEEDED = "Succeeded" + TCP = "tcp" + UDP = "udp" + HTTP = "http" + +class ProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of schedule. + """ + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" FAILED = "Failed" - CANCELED = "Canceled" -class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether requests from Public Network are allowed. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class QuotaUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enum describing the unit of quota measurement. """ COUNT = "Count" -class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The reason for the restriction. +class RandomSamplingAlgorithmRule(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The specific type of random algorithm """ - NOT_SPECIFIED = "NotSpecified" - NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion" - NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription" + RANDOM = "Random" + SOBOL = "Sobol" + +class RecurrenceFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to describe the frequency of a recurrence schedule + """ + + #: Minute frequency. + MINUTE = "Minute" + #: Hour frequency. + HOUR = "Hour" + #: Day frequency. + DAY = "Day" + #: Week frequency. + WEEK = "Week" + #: Month frequency. + MONTH = "Month" + +class ReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine which reference method to use for an asset. 
+ """ + + ID = "Id" + DATA_PATH = "DataPath" + OUTPUT_PATH = "OutputPath" + +class RegressionModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all Regression models supported by AutoML. + """ + + #: Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions. + ELASTIC_NET = "ElasticNet" + #: The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution. + GRADIENT_BOOSTING = "GradientBoosting" + #: Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features. + DECISION_TREE = "DecisionTree" + #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set. + KNN = "KNN" + #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer. + LASSO_LARS = "LassoLars" + #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique. + SGD = "SGD" + #: Random forest is a supervised learning algorithm. + #: The "forest"\\ it builds, is an ensemble of decision trees, usually trained with the + #: “bagging”\\ method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result. + RANDOM_FOREST = "RandomForest" + #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm. + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. + LIGHT_GBM = "LightGBM" + #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners. + XG_BOOST_REGRESSOR = "XGBoostRegressor" + +class RegressionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Regression task. + """ -class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + #: The Spearman's rank coefficient of correlation is a nonparametric measure of rank correlation. + SPEARMAN_CORRELATION = "SpearmanCorrelation" + #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales. + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + #: The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models. + R2_SCORE = "R2Score" + #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales. + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + +class RemoteLoginPortPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """State of the public SSH port. 
Possible values are: Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed @@ -187,16 +1051,120 @@ class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, DISABLED = "Disabled" NOT_SPECIFIED = "NotSpecified" -class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The identity type. +class SamplingAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + GRID = "Grid" + RANDOM = "Random" + BAYESIAN = "Bayesian" + +class ScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + DEFAULT = "Default" + TARGET_UTILIZATION = "TargetUtilization" + +class ScheduleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + CREATE_JOB = "CreateJob" + INVOKE_BATCH_ENDPOINT = "InvokeBatchEndpoint" + +class ScheduleProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" + FAILED = "Failed" + +class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + +class ScheduleStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class SeasonalityMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Forecasting seasonality mode. """ - SYSTEM_ASSIGNED = "SystemAssigned" - USER_ASSIGNED = "UserAssigned" - SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + #: Seasonality to be determined automatically. + AUTO = "Auto" + #: Use the custom seasonality value. + CUSTOM = "Custom" + +class SecretsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore secrets type. + """ + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_PASSWORD = "KerberosPassword" + KERBEROS_KEYTAB = "KerberosKeytab" + +class ServiceDataAccessAuthIdentity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + #: Do not use any identity for service data access. + NONE = "None" + #: Use the system assigned managed identity of the Workspace to authenticate service data access. + WORKSPACE_SYSTEM_ASSIGNED_IDENTITY = "WorkspaceSystemAssignedIdentity" + #: Use the user assigned managed identity of the Workspace to authenticate service data access. + WORKSPACE_USER_ASSIGNED_IDENTITY = "WorkspaceUserAssignedIdentity" + +class ShortSeriesHandlingConfiguration(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The parameter defining how if AutoML should handle short time series. + """ + + #: Represents no/null value. + NONE = "None" + #: Short series will be padded if there are no long series, otherwise short series will be + #: dropped. + AUTO = "Auto" + #: All the short series will be padded. + PAD = "Pad" + #: All the short series will be dropped. + DROP = "Drop" + +class SkuScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Node scaling setting for the compute sku. + """ + + #: Automatically scales node count. + AUTOMATIC = "Automatic" + #: Node count scaled upon user request. + MANUAL = "Manual" + #: Fixed set of nodes. 
NONE = "None" -class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SkuTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """This field is required to be implemented by the Resource Provider if the service has more than + one tier, but is not required on a PUT. + """ + + FREE = "Free" + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + +class SourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Data source type. + """ + + DATASET = "Dataset" + DATASTORE = "Datastore" + URI = "URI" + +class SparkJobEntryType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + SPARK_JOB_PYTHON_ENTRY = "SparkJobPythonEntry" + SPARK_JOB_SCALA_ENTRY = "SparkJobScalaEntry" + +class SshPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the public ssh port is open and accessible according to the VNet/subnet policy if applicable. @@ -205,14 +1173,37 @@ class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): ENABLED = "Enabled" DISABLED = "Disabled" -class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SslConfigStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enable or disable ssl for scoring """ DISABLED = "Disabled" ENABLED = "Enabled" + AUTO = "Auto" + +class StackMetaLearnerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The meta-learner is a model trained on the output of the individual heterogeneous models. + Default meta-learners are LogisticRegression for classification tasks (or LogisticRegressionCV + if cross-validation is enabled) and ElasticNet for regression/forecasting tasks (or + ElasticNetCV if cross-validation is enabled). + This parameter can be one of the following strings: LogisticRegression, LogisticRegressionCV, + LightGBMClassifier, ElasticNet, ElasticNetCV, LightGBMRegressor, or LinearRegression + """ + + NONE = "None" + #: Default meta-learners are LogisticRegression for classification tasks. + LOGISTIC_REGRESSION = "LogisticRegression" + #: Default meta-learners are LogisticRegression for classification task when CV is on. + LOGISTIC_REGRESSION_CV = "LogisticRegressionCV" + LIGHT_GBM_CLASSIFIER = "LightGBMClassifier" + #: Default meta-learners are LogisticRegression for regression task. + ELASTIC_NET = "ElasticNet" + #: Default meta-learners are LogisticRegression for regression task when CV is on. + ELASTIC_NET_CV = "ElasticNetCV" + LIGHT_GBM_REGRESSOR = "LightGBMRegressor" + LINEAR_REGRESSION = "LinearRegression" -class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class Status(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Status of update workspace quota. """ @@ -225,41 +1216,222 @@ class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku" OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" -class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class StatusMessageLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + ERROR = "Error" + INFORMATION = "Information" + WARNING = "Warning" + +class StochasticOptimizer(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Stochastic optimizer for image models. + """ + + #: No optimizer selected. + NONE = "None" + #: Stochastic Gradient Descent optimizer. 
+ SGD = "Sgd" + #: Adam is algorithm the optimizes stochastic objective functions based on adaptive estimates of + #: moments. + ADAM = "Adam" + #: AdamW is a variant of the optimizer Adam that has an improved implementation of weight decay. + ADAMW = "Adamw" + +class StorageAccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """type of this storage account. + """ + + STANDARD_LRS = "Standard_LRS" + PREMIUM_LRS = "Premium_LRS" + +class TargetAggregationFunction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target aggregate function. + """ + + #: Represent no value set. + NONE = "None" + SUM = "Sum" + MAX = "Max" + MIN = "Min" + MEAN = "Mean" + +class TargetLagsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target lags selection modes. + """ + + #: Target lags to be determined automatically. + AUTO = "Auto" + #: Use the custom target lags. + CUSTOM = "Custom" + +class TargetRollingWindowSizeMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target rolling windows size mode. + """ + + #: Determine rolling windows size automatically. + AUTO = "Auto" + #: Use the specified rolling window size. + CUSTOM = "Custom" + +class TaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AutoMLJob Task type. + """ + + #: Classification in machine learning and statistics is a supervised learning approach in which + #: the computer program learns from the data given to it and make new observations or + #: classifications. + CLASSIFICATION = "Classification" + #: Regression means to predict the value using the input data. Regression models are used to + #: predict a continuous value. + REGRESSION = "Regression" + #: Forecasting is a special kind of regression task that deals with time-series data and creates + #: forecasting model + #: that can be used to predict the near future values based on the inputs. + FORECASTING = "Forecasting" + #: Image Classification. Multi-class image classification is used when an image is classified with + #: only a single label + #: from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' + #: or a 'duck'. + IMAGE_CLASSIFICATION = "ImageClassification" + #: Image Classification Multilabel. Multi-label image classification is used when an image could + #: have one or more labels + #: from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel" + #: Image Object Detection. Object detection is used to identify objects in an image and locate + #: each object with a + #: bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + IMAGE_OBJECT_DETECTION = "ImageObjectDetection" + #: Image Instance Segmentation. Instance segmentation is used to identify objects in an image at + #: the pixel level, + #: drawing a polygon around each object in the image. + IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation" + #: Text classification (also known as text tagging or text categorization) is the process of + #: sorting texts into categories. + #: Categories are mutually exclusive. + TEXT_CLASSIFICATION = "TextClassification" + #: Multilabel classification task assigns each sample to a group (zero or more) of target labels. + TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel" + #: Text Named Entity Recognition a.k.a. TextNER. 
+ #: Named Entity Recognition (NER) is the ability to take free-form text and identify the + #: occurrences of entities such as people, locations, organizations, and more. + TEXT_NER = "TextNER" + +class TextAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of text data. + """ + + CLASSIFICATION = "Classification" + NAMED_ENTITY_RECOGNITION = "NamedEntityRecognition" + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + + RECURRENCE = "Recurrence" + CRON = "Cron" + +class UnderlyingResourceAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): DELETE = "Delete" DETACH = "Detach" -class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class UnitOfMeasure(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The unit of time measurement for the specified VM price. Example: OneHour """ ONE_HOUR = "OneHour" -class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class UsageUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enum describing the unit of usage measurement. """ COUNT = "Count" -class VMPriceOSType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class UseStl(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Configure STL Decomposition of the time-series target column. + """ + + #: No stl decomposition. + NONE = "None" + SEASON = "Season" + SEASON_TREND = "SeasonTrend" + +class ValidationMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metric computation method to use for validation metrics in image tasks. + """ + + #: No metric. + NONE = "None" + #: Coco metric. + COCO = "Coco" + #: Voc metric. + VOC = "Voc" + #: CocoVoc metric. + COCO_VOC = "CocoVoc" + +class ValueFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """format for the workspace connection value + """ + + JSON = "JSON" + +class VMPriceOSType(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Operating system type used by the VM. """ LINUX = "Linux" WINDOWS = "Windows" -class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class VmPriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Virtual Machine priority """ DEDICATED = "Dedicated" LOW_PRIORITY = "LowPriority" -class VMTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class VMTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The type of the VM. """ STANDARD = "Standard" LOW_PRIORITY = "LowPriority" SPOT = "Spot" + +class VolumeDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe + """ + + BIND = "bind" + VOLUME = "volume" + TMPFS = "tmpfs" + NPIPE = "npipe" + +class WeekDay(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of weekday + """ + + #: Monday weekday. + MONDAY = "Monday" + #: Tuesday weekday. + TUESDAY = "Tuesday" + #: Wednesday weekday. + WEDNESDAY = "Wednesday" + #: Thursday weekday. + THURSDAY = "Thursday" + #: Friday weekday. + FRIDAY = "Friday" + #: Saturday weekday. + SATURDAY = "Saturday" + #: Sunday weekday. + SUNDAY = "Sunday" + +class WorkspaceProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of workspace resource. The provisioningState is to indicate states + for resource provisioning. 
+ """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + SOFT_DELETED = "SoftDeleted" diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py deleted file mode 100644 index d95b81b6554ee..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py +++ /dev/null @@ -1,3667 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class Compute(msrest.serialization.Model): - """Machine Learning compute object. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, VirtualMachine. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'VirtualMachine': 'VirtualMachine'} - } - - def __init__( - self, - **kwargs - ): - super(Compute, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - - -class AKS(Compute): - """A Machine Learning compute based on AKS. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: AKS properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.AKSProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AKSProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AKS, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str - self.properties = kwargs.get('properties', None) - - -class ComputeSecrets(msrest.serialization.Model): - """Secrets related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeSecrets, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - - -class AksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :type user_kube_config: str - :param admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :type admin_kube_config: str - :param image_pull_secret_name: Image registry pull secret. 
- :type image_pull_secret_name: str - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) - - -class AksNetworkingConfiguration(msrest.serialization.Model): - """Advance configuration for AKS networking. - - :param subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :type subnet_id: str - :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must - not overlap with any Subnet IP ranges. - :type service_cidr: str - :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within - the Kubernetes service address range specified in serviceCidr. - :type dns_service_ip: str - :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It - must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :type docker_bridge_cidr: str - """ - - _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - } - - _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AksNetworkingConfiguration, self).__init__(**kwargs) - self.subnet_id = kwargs.get('subnet_id', None) - self.service_cidr = kwargs.get('service_cidr', None) - self.dns_service_ip = kwargs.get('dns_service_ip', None) - self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None) - - -class AKSProperties(msrest.serialization.Model): - """AKS properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param cluster_fqdn: Cluster full qualified domain name. - :type cluster_fqdn: str - :ivar system_services: System services. - :vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService] - :param agent_count: Number of agents. - :type agent_count: int - :param agent_vm_size: Agent virtual machine size. - :type agent_vm_size: str - :param ssl_configuration: SSL configuration. - :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :param aks_networking_configuration: AKS networking configuration for vnet. 
- :type aks_networking_configuration: - ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - """ - - _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, - } - - _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - super(AKSProperties, self).__init__(**kwargs) - self.cluster_fqdn = kwargs.get('cluster_fqdn', None) - self.system_services = None - self.agent_count = kwargs.get('agent_count', None) - self.agent_vm_size = kwargs.get('agent_vm_size', None) - self.ssl_configuration = kwargs.get('ssl_configuration', None) - self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) - - -class AmlCompute(Compute): - """An Azure Machine Learning compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: AML Compute properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlCompute, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str - self.properties = kwargs.get('properties', None) - - -class AmlComputeNodeInformation(msrest.serialization.Model): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar node_id: ID of the compute node. - :vartype node_id: str - :ivar private_ip_address: Private IP address of the compute node. - :vartype private_ip_address: str - :ivar public_ip_address: Public IP address of the compute node. - :vartype public_ip_address: str - :ivar port: SSH port number of the node. - :vartype port: int - :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". - :vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState - :ivar run_id: ID of the Experiment running on the node, if any else null. - :vartype run_id: str - """ - - _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, - } - - _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeNodeInformation, self).__init__(**kwargs) - self.node_id = None - self.private_ip_address = None - self.public_ip_address = None - self.port = None - self.node_state = None - self.run_id = None - - -class ComputeNodesInformation(msrest.serialization.Model): - """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. 
The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.next_link = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - :ivar nodes: The collection of returned AmlCompute nodes details. - :vartype nodes: list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - 'nodes': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str - self.nodes = None - - -class AmlComputeProperties(msrest.serialization.Model): - """AML Compute properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :param scale_settings: Scale settings for AML Compute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :param user_account_credentials: Credentials for an administrator user account that will be - created on each compute node. - :type user_account_credentials: - ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param remote_login_port_public_access: State of the public SSH port. Possible values are: - Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, - else is open all public nodes. 
It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". - :type remote_login_port_public_access: str or - ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess - :ivar allocation_state: Allocation state of the compute. Possible values are: steady - - Indicates that the compute is not resizing. There are no changes to the number of compute nodes - in the compute in progress. A compute enters this state when it is created and when no - operations are being performed on the compute to change the number of compute nodes. resizing - - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". - :vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState - :ivar allocation_state_transition_time: The time at which the compute entered its current - allocation state. - :vartype allocation_state_transition_time: ~datetime.datetime - :ivar errors: Collection of errors encountered by various compute nodes during node setup. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar current_node_count: The number of compute nodes currently assigned to the compute. - :vartype current_node_count: int - :ivar target_node_count: The target number of compute nodes for the compute. If the - allocationState is resizing, this property denotes the target node count for the ongoing resize - operation. If the allocationState is steady, this property denotes the target node count for - the previous resize operation. - :vartype target_node_count: int - :ivar node_state_counts: Counts of various node states on the compute. 
- :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts - """ - - _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlComputeProperties, self).__init__(**kwargs) - self.vm_size = kwargs.get('vm_size', None) - self.vm_priority = kwargs.get('vm_priority', None) - self.scale_settings = kwargs.get('scale_settings', None) - self.user_account_credentials = kwargs.get('user_account_credentials', None) - self.subnet = kwargs.get('subnet', None) - self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") - self.allocation_state = None - self.allocation_state_transition_time = None - self.errors = None - self.current_node_count = None - self.target_node_count = None - self.node_state_counts = None - - -class AmlUserFeature(msrest.serialization.Model): - """Features enabled for a workspace. - - :param id: Specifies the feature ID. - :type id: str - :param display_name: Specifies the feature name. - :type display_name: str - :param description: Describes the feature for user experience. - :type description: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(AmlUserFeature, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.display_name = kwargs.get('display_name', None) - self.description = kwargs.get('description', None) - - -class ClusterUpdateParameters(msrest.serialization.Model): - """AmlCompute update parameters. - - :param scale_settings: Desired scale settings for the amlCompute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - """ - - _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, - } - - def __init__( - self, - **kwargs - ): - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = kwargs.get('scale_settings', None) - - -class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): - """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar principal_id: The principal id of user assigned identity. - :vartype principal_id: str - :ivar client_id: The client id of user assigned identity. - :vartype client_id: str - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) - self.principal_id = None - self.client_id = None - - -class ComputeInstance(Compute): - """An Azure Machine Learning compute instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: Compute Instance properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstance, self).__init__(**kwargs) - self.compute_type = 'ComputeInstance' # type: str - self.properties = kwargs.get('properties', None) - - -class ComputeInstanceApplication(msrest.serialization.Model): - """Defines an Aml Instance application and its connectivity endpoint URI. - - :param display_name: Name of the ComputeInstance application. - :type display_name: str - :param endpoint_uri: Application' endpoint URI. - :type endpoint_uri: str - """ - - _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceApplication, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.endpoint_uri = kwargs.get('endpoint_uri', None) - - -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): - """Defines all connectivity endpoints and properties for a ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar public_ip_address: Public IP Address of this ComputeInstance. - :vartype public_ip_address: str - :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in - which the compute instance is deployed). - :vartype private_ip_address: str - """ - - _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - } - - _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) - self.public_ip_address = None - self.private_ip_address = None - - -class ComputeInstanceCreatedBy(msrest.serialization.Model): - """Describes information on user who created this ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_name: Name of the user. - :vartype user_name: str - :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. - :vartype user_org_id: str - :ivar user_id: Uniquely identifies the user within his/her organization. 
- :vartype user_id: str - """ - - _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, - } - - _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceCreatedBy, self).__init__(**kwargs) - self.user_name = None - self.user_org_id = None - self.user_id = None - - -class ComputeInstanceLastOperation(msrest.serialization.Model): - """The last operation on ComputeInstance. - - :param operation_name: Name of the last operation. Possible values include: "Create", "Start", - "Stop", "Restart", "Reimage", "Delete". - :type operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName - :param operation_time: Time of the last operation. - :type operation_time: ~datetime.datetime - :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed". - :type operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - """ - - _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceLastOperation, self).__init__(**kwargs) - self.operation_name = kwargs.get('operation_name', None) - self.operation_time = kwargs.get('operation_time', None) - self.operation_status = kwargs.get('operation_status', None) - - -class ComputeInstanceProperties(msrest.serialization.Model): - """Compute Instance properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param vm_size: Virtual Machine Size. - :type vm_size: str - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :type application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :param ssh_settings: Specifies policy and settings for SSH access. - :type ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings - :ivar connectivity_endpoints: Describes all connectivity endpoints available for this - ComputeInstance. - :vartype connectivity_endpoints: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints - :ivar applications: Describes available applications and their endpoints on this - ComputeInstance. - :vartype applications: - list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] - :ivar created_by: Describes information on user who created this ComputeInstance. - :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy - :ivar errors: Collection of errors encountered on this ComputeInstance. 
- :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", - "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable". - :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState - :ivar last_operation: The last operation on ComputeInstance. - :vartype last_operation: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation - """ - - _validation = { - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceProperties, self).__init__(**kwargs) - self.vm_size = kwargs.get('vm_size', None) - self.subnet = kwargs.get('subnet', None) - self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared") - self.ssh_settings = kwargs.get('ssh_settings', None) - self.connectivity_endpoints = None - self.applications = None - self.created_by = None - self.errors = None - self.state = None - self.last_operation = None - - -class ComputeInstanceSshSettings(msrest.serialization.Model): - """Specifies policy and settings for SSH access. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param ssh_public_access: State of the public SSH port. Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :type ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :ivar admin_user_name: Describes the admin user name. - :vartype admin_user_name: str - :ivar ssh_port: Describes the port for connecting through SSH. - :vartype ssh_port: int - :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t - rsa -b 2048" to generate your SSH key pairs. 
- :type admin_public_key: str - """ - - _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, - } - - _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") - self.admin_user_name = None - self.ssh_port = None - self.admin_public_key = kwargs.get('admin_public_key', None) - - -class Resource(msrest.serialization.Model): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.type = None - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) - - -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param properties: Compute properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.Compute - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, - } - - def __init__( - self, - **kwargs - ): - super(ComputeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class Databricks(Compute): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(Databricks, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.properties = kwargs.get('properties', None) - - -class DatabricksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on Databricks. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param databricks_access_token: access token for databricks account. - :type databricks_access_token: str - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.databricks_access_token = kwargs.get('databricks_access_token', None) - - -class DatabricksProperties(msrest.serialization.Model): - """DatabricksProperties. - - :param databricks_access_token: Databricks access token. - :type databricks_access_token: str - """ - - _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - - -class DataFactory(Compute): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. 
Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(DataFactory, self).__init__(**kwargs) - self.compute_type = 'DataFactory' # type: str - - -class DataLakeAnalytics(Compute): - """A DataLakeAnalytics compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. 
- :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeAnalytics, self).__init__(**kwargs) - self.compute_type = 'DataLakeAnalytics' # type: str - self.properties = kwargs.get('properties', None) - - -class DataLakeAnalyticsProperties(msrest.serialization.Model): - """DataLakeAnalyticsProperties. - - :param data_lake_store_account_name: DataLake Store Account Name. - :type data_lake_store_account_name: str - """ - - _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataLakeAnalyticsProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) - - -class EncryptionProperty(msrest.serialization.Model): - """EncryptionProperty. - - All required parameters must be populated in order to send to Azure. - - :param status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :param key_vault_properties: Required. Customer Key vault properties. - :type key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties - """ - - _validation = { - 'status': {'required': True}, - 'key_vault_properties': {'required': True}, - } - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionProperty, self).__init__(**kwargs) - self.status = kwargs['status'] - self.key_vault_properties = kwargs['key_vault_properties'] - - -class ErrorDetail(msrest.serialization.Model): - """Error detail information. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. 
- :type message: str - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - - -class ErrorResponse(msrest.serialization.Model): - """Error response information. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Error code. - :vartype code: str - :ivar message: Error message. - :vartype message: str - :ivar details: An array of error detail objects. - :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'details': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - self.details = None - - -class EstimatedVMPrice(msrest.serialization.Model): - """The estimated price info for using a VM of a particular OS type, tier, etc. - - All required parameters must be populated in order to send to Azure. - - :param retail_price: Required. The price charged for using the VM. - :type retail_price: float - :param os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :type os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :param vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier - """ - - _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, - } - - _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = kwargs['retail_price'] - self.os_type = kwargs['os_type'] - self.vm_tier = kwargs['vm_tier'] - - -class EstimatedVMPrices(msrest.serialization.Model): - """The estimated price info for using a VM. - - All required parameters must be populated in order to send to Azure. - - :param billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". - :type billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :param unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :type unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :param values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. 
- :type values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] - """ - - _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, - } - - def __init__( - self, - **kwargs - ): - super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = kwargs['billing_currency'] - self.unit_of_measure = kwargs['unit_of_measure'] - self.values = kwargs['values'] - - -class HDInsight(Compute): - """A HDInsight compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(HDInsight, self).__init__(**kwargs) - self.compute_type = 'HDInsight' # type: str - self.properties = kwargs.get('properties', None) - - -class HDInsightProperties(msrest.serialization.Model): - """HDInsightProperties. - - :param ssh_port: Port open for ssh connections on the master node of the cluster. - :type ssh_port: int - :param address: Public IP address of the master node of the cluster. - :type address: str - :param administrator_account: Admin credentials for master node of the cluster. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - - -class Identity(msrest.serialization.Model): - """Identity for the resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :param type: Required. The identity type. Possible values include: "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned", "None". - :type type: str or ~azure.mgmt.machinelearningservices.models.ResourceIdentityType - :param user_assigned_identities: The list of user identities associated with resource. The user - identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
- :type user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, - } - - def __init__( - self, - **kwargs - ): - super(Identity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = kwargs['type'] - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) - - -class KeyVaultProperties(msrest.serialization.Model): - """KeyVaultProperties. - - All required parameters must be populated in order to send to Azure. - - :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned - encryption key is present. - :type key_vault_arm_id: str - :param key_identifier: Required. Key vault uri to access the encryption key. - :type key_identifier: str - :param identity_client_id: For future use - The client id of the identity which will be used to - access key vault. - :type identity_client_id: str - """ - - _validation = { - 'key_vault_arm_id': {'required': True}, - 'key_identifier': {'required': True}, - } - - _attribute_map = { - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(KeyVaultProperties, self).__init__(**kwargs) - self.key_vault_arm_id = kwargs['key_vault_arm_id'] - self.key_identifier = kwargs['key_identifier'] - self.identity_client_id = kwargs.get('identity_client_id', None) - - -class ListAmlUserFeatureResult(msrest.serialization.Model): - """The List Aml user feature operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListAmlUserFeatureResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListUsagesResult(msrest.serialization.Model): - """The List Usages operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. 
- :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListUsagesResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListWorkspaceKeysResult(msrest.serialization.Model): - """ListWorkspaceKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :param notebook_access_keys: - :type notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.NotebookListCredentialsResult - """ - - _validation = { - 'user_storage_key': {'readonly': True}, - 'user_storage_resource_id': {'readonly': True}, - 'app_insights_instrumentation_key': {'readonly': True}, - 'container_registry_credentials': {'readonly': True}, - } - - _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, - } - - def __init__( - self, - **kwargs - ): - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None - self.notebook_access_keys = kwargs.get('notebook_access_keys', None) - - -class ListWorkspaceQuotas(msrest.serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ListWorkspaceQuotas, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class MachineLearningServiceError(msrest.serialization.Model): - """Wrapper for error response to follow ARM guidelines. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: The error response. 
- :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse - """ - - _validation = { - 'error': {'readonly': True}, - } - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorResponse'}, - } - - def __init__( - self, - **kwargs - ): - super(MachineLearningServiceError, self).__init__(**kwargs) - self.error = None - - -class NodeStateCounts(msrest.serialization.Model): - """Counts of various compute node states on the amlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int - """ - - _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, - } - - _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None - - -class NotebookListCredentialsResult(msrest.serialization.Model): - """NotebookListCredentialsResult. - - :param primary_access_key: - :type primary_access_key: str - :param secondary_access_key: - :type secondary_access_key: str - """ - - _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookListCredentialsResult, self).__init__(**kwargs) - self.primary_access_key = kwargs.get('primary_access_key', None) - self.secondary_access_key = kwargs.get('secondary_access_key', None) - - -class NotebookPreparationError(msrest.serialization.Model): - """NotebookPreparationError. 
- - :param error_message: - :type error_message: str - :param status_code: - :type status_code: int - """ - - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = kwargs.get('error_message', None) - self.status_code = kwargs.get('status_code', None) - - -class NotebookResourceInfo(msrest.serialization.Model): - """NotebookResourceInfo. - - :param fqdn: - :type fqdn: str - :param resource_id: the data plane resourceId that used to initialize notebook component. - :type resource_id: str - :param notebook_preparation_error: The error that occurs when preparing notebook. - :type notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError - """ - - _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, - } - - def __init__( - self, - **kwargs - ): - super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = kwargs.get('fqdn', None) - self.resource_id = kwargs.get('resource_id', None) - self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) - - -class Operation(msrest.serialization.Model): - """Azure Machine Learning workspace REST API operation. - - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: Display name of operation. - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display = kwargs.get('display', None) - - -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. - - :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. - :type description: str - """ - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - self.description = kwargs.get('description', None) - - -class OperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. - - :param value: List of AML workspace operations supported by the AML workspace resource - provider. 
- :type value: list[~azure.mgmt.machinelearningservices.models.Operation] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PaginatedComputeResourcesList(msrest.serialization.Model): - """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. - - :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. - :type value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): - """Paginated list of Workspace connection objects. - - :param value: An array of Workspace connection objects. - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnection] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class Password(msrest.serialization.Model): - """Password. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: - :vartype name: str - :ivar value: - :vartype value: str - """ - - _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None - - -class PrivateEndpoint(msrest.serialization.Model): - """The Private Endpoint resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - - -class PrivateEndpointConnection(msrest.serialization.Model): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: ResourceId of the private endpoint connection. - :vartype id: str - :ivar name: Friendly name of the private endpoint connection. - :vartype name: str - :ivar type: Resource type of private endpoint connection. - :vartype type: str - :param private_endpoint: The resource of private end point. 
- :type private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :param private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. - :type private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Possible values include: "Succeeded", "Creating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - self.provisioning_state = None - - -class PrivateLinkResource(Resource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar group_id: The private link resource group id. - :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :param required_zone_names: The private link resource Private link DNS zone name. 
- :type required_zone_names: list[str] - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResource, self).__init__(**kwargs) - self.group_id = None - self.required_members = None - self.required_zone_names = kwargs.get('required_zone_names', None) - - -class PrivateLinkResourceListResult(msrest.serialization.Model): - """A list of private link resources. - - :param value: Array of private link resources. - :type value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. - - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :param description: The reason for approval/rejection of the connection. - :type description: str - :param actions_required: A message indicating if changes on the service provider require any - updates on the consumer. - :type actions_required: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.description = kwargs.get('description', None) - self.actions_required = kwargs.get('actions_required', None) - - -class QuotaBaseProperties(msrest.serialization.Model): - """The properties for Quota update or retrieval. - - :param id: Specifies the resource ID. - :type id: str - :param type: Specifies the resource type. - :type type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :param unit: An enum describing the unit of quota measurement. Possible values include: - "Count". 
- :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(QuotaBaseProperties, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.type = kwargs.get('type', None) - self.limit = kwargs.get('limit', None) - self.unit = kwargs.get('unit', None) - - -class QuotaUpdateParameters(msrest.serialization.Model): - """Quota update parameters. - - :param value: The list for update quota. - :type value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, - } - - def __init__( - self, - **kwargs - ): - super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class RegistryListCredentialsResult(msrest.serialization.Model): - """RegistryListCredentialsResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: - :vartype location: str - :ivar username: - :vartype username: str - :param passwords: - :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] - """ - - _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, - } - - def __init__( - self, - **kwargs - ): - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.username = None - self.passwords = kwargs.get('passwords', None) - - -class ResourceId(msrest.serialization.Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. - - All required parameters must be populated in order to send to Azure. - - :param id: Required. The ID of the resource. - :type id: str - """ - - _validation = { - 'id': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceId, self).__init__(**kwargs) - self.id = kwargs['id'] - - -class ResourceName(msrest.serialization.Model): - """The Resource Name. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class ResourceQuota(msrest.serialization.Model): - """The quota assigned to a resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar name: Name of the resource. - :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName - :ivar limit: The maximum permitted quota of the resource. 
- :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceQuota, self).__init__(**kwargs) - self.id = None - self.type = None - self.name = None - self.limit = None - self.unit = None - - -class ResourceSkuLocationInfo(msrest.serialization.Model): - """ResourceSkuLocationInfo. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: Location of the SKU. - :vartype location: str - :ivar zones: List of availability zones where the SKU is supported. - :vartype zones: list[str] - :ivar zone_details: Details of capabilities available to a SKU in specific zones. - :vartype zone_details: list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] - """ - - _validation = { - 'location': {'readonly': True}, - 'zones': {'readonly': True}, - 'zone_details': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'zones': {'key': 'zones', 'type': '[str]'}, - 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceSkuLocationInfo, self).__init__(**kwargs) - self.location = None - self.zones = None - self.zone_details = None - - -class ResourceSkuZoneDetails(msrest.serialization.Model): - """Describes The zonal capabilities of a SKU. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The set of zones that the SKU is available in with the specified capabilities. - :vartype name: list[str] - :ivar capabilities: A list of capabilities that are available for the SKU in the specified list - of zones. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - """ - - _validation = { - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': '[str]'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - } - - def __init__( - self, - **kwargs - ): - super(ResourceSkuZoneDetails, self).__init__(**kwargs) - self.name = None - self.capabilities = None - - -class Restriction(msrest.serialization.Model): - """The restriction because of which SKU cannot be used. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The type of restrictions. As of now only possible value for this is location. - :vartype type: str - :ivar values: The value of restrictions. If the restriction type is set to location. This would - be different locations where the SKU is restricted. - :vartype values: list[str] - :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", - "NotAvailableForRegion", "NotAvailableForSubscription". 
- :type reason_code: str or ~azure.mgmt.machinelearningservices.models.ReasonCode - """ - - _validation = { - 'type': {'readonly': True}, - 'values': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - 'reason_code': {'key': 'reasonCode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Restriction, self).__init__(**kwargs) - self.type = None - self.values = None - self.reason_code = kwargs.get('reason_code', None) - - -class ScaleSettings(msrest.serialization.Model): - """scale settings for AML Compute. - - All required parameters must be populated in order to send to Azure. - - :param max_node_count: Required. Max number of nodes to use. - :type max_node_count: int - :param min_node_count: Min number of nodes to use. - :type min_node_count: int - :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. - :type node_idle_time_before_scale_down: ~datetime.timedelta - """ - - _validation = { - 'max_node_count': {'required': True}, - } - - _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = kwargs['max_node_count'] - self.min_node_count = kwargs.get('min_node_count', 0) - self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) - - -class ServicePrincipalCredentials(msrest.serialization.Model): - """Service principal credentials. - - All required parameters must be populated in order to send to Azure. - - :param client_id: Required. Client Id. - :type client_id: str - :param client_secret: Required. Client secret. - :type client_secret: str - """ - - _validation = { - 'client_id': {'required': True}, - 'client_secret': {'required': True}, - } - - _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ServicePrincipalCredentials, self).__init__(**kwargs) - self.client_id = kwargs['client_id'] - self.client_secret = kwargs['client_secret'] - - -class SharedPrivateLinkResource(msrest.serialization.Model): - """SharedPrivateLinkResource. - - :param name: Unique name of the private link. - :type name: str - :param private_link_resource_id: The resource id that private link links to. - :type private_link_resource_id: str - :param group_id: The private link resource group id. - :type group_id: str - :param request_message: Request message. - :type request_message: str - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". 
- :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.group_id = kwargs.get('group_id', None) - self.request_message = kwargs.get('request_message', None) - self.status = kwargs.get('status', None) - - -class Sku(msrest.serialization.Model): - """Sku of the resource. - - :param name: Name of the sku. - :type name: str - :param tier: Tier of the sku like Basic or Enterprise. - :type tier: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Sku, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.tier = kwargs.get('tier', None) - - -class SKUCapability(msrest.serialization.Model): - """Features/user capabilities associated with the sku. - - :param name: Capability/Feature ID. - :type name: str - :param value: Details about the feature/capability. - :type value: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SKUCapability, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.value = kwargs.get('value', None) - - -class SkuListResult(msrest.serialization.Model): - """List of skus with features. - - :param value: - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceSku] - :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this - URI to fetch the next page of Workspace Skus. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class SkuSettings(msrest.serialization.Model): - """Describes Workspace Sku details and features. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar locations: The set of locations that the SKU is available. This will be supported and - registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). - :vartype locations: list[str] - :ivar location_info: A list of locations and availability zones in those locations where the - SKU is available. - :vartype location_info: - list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] - :ivar tier: Sku Tier like Basic or Enterprise. - :vartype tier: str - :ivar resource_type: - :vartype resource_type: str - :ivar name: - :vartype name: str - :ivar capabilities: List of features/user capabilities associated with the sku. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - :param restrictions: The restrictions because of which SKU cannot be used. 
This is empty if - there are no restrictions. - :type restrictions: list[~azure.mgmt.machinelearningservices.models.Restriction] - """ - - _validation = { - 'locations': {'readonly': True}, - 'location_info': {'readonly': True}, - 'tier': {'readonly': True}, - 'resource_type': {'readonly': True}, - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, - } - - _attribute_map = { - 'locations': {'key': 'locations', 'type': '[str]'}, - 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuSettings, self).__init__(**kwargs) - self.locations = None - self.location_info = None - self.tier = None - self.resource_type = None - self.name = None - self.capabilities = None - self.restrictions = kwargs.get('restrictions', None) - - -class SslConfiguration(msrest.serialization.Model): - """The ssl configuration for scoring. - - :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.SslConfigurationStatus - :param cert: Cert data. - :type cert: str - :param key: Key data. - :type key: str - :param cname: CNAME of the cert. - :type cname: str - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SslConfiguration, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.cert = kwargs.get('cert', None) - self.key = kwargs.get('key', None) - self.cname = kwargs.get('cname', None) - - -class SystemService(msrest.serialization.Model): - """A system service running on a compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address. - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str - """ - - _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None - - -class UpdateWorkspaceQuotas(msrest.serialization.Model): - """The properties for update Quota response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". 
- :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :param status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". - :type status: str or ~azure.mgmt.machinelearningservices.models.Status - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) - self.id = None - self.type = None - self.limit = kwargs.get('limit', None) - self.unit = None - self.status = kwargs.get('status', None) - - -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): - """The result of update workspace quota. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of workspace quota update result. - :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] - :ivar next_link: The URI to fetch the next page of workspace quota update result. Call - ListNext() with this to fetch the next page of Workspace Quota update result. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class Usage(msrest.serialization.Model): - """Describes AML Resource Usage. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit - :ivar current_value: The current usage of the resource. - :vartype current_value: long - :ivar limit: The maximum permitted usage of the resource. - :vartype limit: long - :ivar name: The name of the type of usage. - :vartype name: ~azure.mgmt.machinelearningservices.models.UsageName - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, - } - - def __init__( - self, - **kwargs - ): - super(Usage, self).__init__(**kwargs) - self.id = None - self.type = None - self.unit = None - self.current_value = None - self.limit = None - self.name = None - - -class UsageName(msrest.serialization.Model): - """The Usage Names. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UsageName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class UserAccountCredentials(msrest.serialization.Model): - """Settings for user account that gets created on each on the nodes of a compute. - - All required parameters must be populated in order to send to Azure. - - :param admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :type admin_user_name: str - :param admin_user_ssh_public_key: SSH public key of the administrator user account. - :type admin_user_ssh_public_key: str - :param admin_user_password: Password of the administrator user account. - :type admin_user_password: str - """ - - _validation = { - 'admin_user_name': {'required': True}, - } - - _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = kwargs['admin_user_name'] - self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None) - self.admin_user_password = kwargs.get('admin_user_password', None) - - -class VirtualMachine(Compute): - """A Machine Learning compute based on Azure Virtual Machines. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. 
- :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachine, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str - self.properties = kwargs.get('properties', None) - - -class VirtualMachineProperties(msrest.serialization.Model): - """VirtualMachineProperties. - - :param virtual_machine_size: Virtual Machine size. - :type virtual_machine_size: str - :param ssh_port: Port open for ssh connections. - :type ssh_port: int - :param address: Public IP address of the virtual machine. - :type address: str - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineProperties, self).__init__(**kwargs) - self.virtual_machine_size = kwargs.get('virtual_machine_size', None) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - - -class VirtualMachineSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param administrator_account: Admin credentials for virtual machine. 
- :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSecrets, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str - self.administrator_account = kwargs.get('administrator_account', None) - - -class VirtualMachineSize(msrest.serialization.Model): - """Describes the properties of a VM size. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the virtual machine size. - :vartype name: str - :ivar family: The family name of the virtual machine size. - :vartype family: str - :ivar v_cp_us: The number of vCPUs supported by the virtual machine size. - :vartype v_cp_us: int - :ivar gpus: The number of gPUs supported by the virtual machine size. - :vartype gpus: int - :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size. - :vartype os_vhd_size_mb: int - :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine - size. - :vartype max_resource_volume_mb: int - :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size. - :vartype memory_gb: float - :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs. - :vartype low_priority_capable: bool - :ivar premium_io: Specifies if the virtual machine size supports premium IO. - :vartype premium_io: bool - :param estimated_vm_prices: The estimated price information for using a VM. - :type estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :param supported_compute_types: Specifies the compute types supported by the virtual machine - size. 
- :type supported_compute_types: list[str] - """ - - _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSize, self).__init__(**kwargs) - self.name = None - self.family = None - self.v_cp_us = None - self.gpus = None - self.os_vhd_size_mb = None - self.max_resource_volume_mb = None - self.memory_gb = None - self.low_priority_capable = None - self.premium_io = None - self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None) - self.supported_compute_types = kwargs.get('supported_compute_types', None) - - -class VirtualMachineSizeListResult(msrest.serialization.Model): - """The List Virtual Machine size operation response. - - :param aml_compute: The list of virtual machine sizes supported by AmlCompute. - :type aml_compute: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] - """ - - _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = kwargs.get('aml_compute', None) - - -class VirtualMachineSshCredentials(msrest.serialization.Model): - """Admin credentials for virtual machine. - - :param username: Username of admin account. - :type username: str - :param password: Password of admin account. - :type password: str - :param public_key_data: Public key data. - :type public_key_data: str - :param private_key_data: Private key data. - :type private_key_data: str - """ - - _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualMachineSshCredentials, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.public_key_data = kwargs.get('public_key_data', None) - self.private_key_data = kwargs.get('private_key_data', None) - - -class Workspace(Resource): - """An object that represents a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. 
- :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar workspace_id: The immutable id associated with this workspace. - :vartype workspace_id: str - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. This name in mutable. - :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format. - :vartype creation_time: ~datetime.datetime - :param key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :type key_vault: str - :param application_insights: ARM id of the application insights associated with this workspace. - This cannot be changed once the workspace has been created. - :type application_insights: str - :param container_registry: ARM id of the container registry associated with this workspace. - This cannot be changed once the workspace has been created. - :type container_registry: str - :param storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :type storage_account: str - :param discovery_url: Url for the discovery service to identify regional endpoints for machine - learning experimentation services. - :type discovery_url: str - :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param encryption: The encryption settings of Azure ML workspace. - :type encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data - collected by the service. - :type hbi_workspace: bool - :ivar service_provisioned_resource_group: The name of the managed resource group created by - workspace RP in customer subscription if the workspace is CMK workspace. - :vartype service_provisioned_resource_group: str - :ivar private_link_count: Count of private connections in the workspace. - :vartype private_link_count: int - :param image_build_compute: The compute name for image build. - :type image_build_compute: str - :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :type allow_public_access_when_behind_vnet: bool - :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. - :vartype private_endpoint_connections: - list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - :param shared_private_link_resources: The list of shared private link resources in this - workspace. - :type shared_private_link_resources: - list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :ivar notebook_info: The notebook info of Azure ML workspace. 
- :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'notebook_info': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - } - - def __init__( - self, - **kwargs - ): - super(Workspace, self).__init__(**kwargs) - self.workspace_id = None - self.description = kwargs.get('description', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.creation_time = None - self.key_vault = kwargs.get('key_vault', None) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.storage_account = kwargs.get('storage_account', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.provisioning_state = None - self.encryption = kwargs.get('encryption', None) - self.hbi_workspace = kwargs.get('hbi_workspace', False) - self.service_provisioned_resource_group = None - self.private_link_count = None - self.image_build_compute = kwargs.get('image_build_compute', None) - self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False) - self.private_endpoint_connections = None - self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) - 
self.notebook_info = None - - -class WorkspaceConnection(msrest.serialization.Model): - """Workspace connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: ResourceId of the workspace connection. - :vartype id: str - :ivar name: Friendly name of the workspace connection. - :vartype name: str - :ivar type: Resource type of workspace connection. - :vartype type: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.category = kwargs.get('category', None) - self.target = kwargs.get('target', None) - self.auth_type = kwargs.get('auth_type', None) - self.value = kwargs.get('value', None) - - -class WorkspaceConnectionDto(msrest.serialization.Model): - """object used for creating workspace connection. - - :param name: Friendly name of the workspace connection. - :type name: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceConnectionDto, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.category = kwargs.get('category', None) - self.target = kwargs.get('target', None) - self.auth_type = kwargs.get('auth_type', None) - self.value = kwargs.get('value', None) - - -class WorkspaceListResult(msrest.serialization.Model): - """The result of a request to list machine learning workspaces. - - :param value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :type value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :param next_link: The URI that can be used to request the next list of machine learning - workspaces. 
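For the connection models above, WorkspaceConnection is the read model returned by the service and the DTO is the creation payload; a minimal sketch against the kwargs-based definitions shown here (all values are illustrative, and the operations-group call that accepts this payload is outside this excerpt):

from azure.mgmt.machinelearningservices.models import WorkspaceConnectionDto

# Creation payload for a workspace connection; category and auth_type are free-form
# strings in this model, so the values below are examples only.
connection_payload = WorkspaceConnectionDto(
    name="my-registry-connection",
    category="ContainerRegistry",
    target="https://myregistry.azurecr.io",
    auth_type="PAT",
    value="<personal-access-token>",
)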
- :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class WorkspaceSku(msrest.serialization.Model): - """AML workspace sku information. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar resource_type: - :vartype resource_type: str - :ivar skus: The list of workspace sku settings. - :vartype skus: list[~azure.mgmt.machinelearningservices.models.SkuSettings] - """ - - _validation = { - 'resource_type': {'readonly': True}, - 'skus': {'readonly': True}, - } - - _attribute_map = { - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceSku, self).__init__(**kwargs) - self.resource_type = None - self.skus = None - - -class WorkspaceUpdateParameters(msrest.serialization.Model): - """The parameters for updating a machine learning workspace. - - :param tags: A set of tags. The resource tags for the machine learning workspace. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. - :type friendly_name: str - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) - self.description = kwargs.get('description', None) - self.friendly_name = kwargs.get('friendly_name', None) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py index 8abd97a8cef06..08022287a0de7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py @@ -7,53 +7,230 @@ # -------------------------------------------------------------------------- import datetime -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union from azure.core.exceptions import HttpResponseError import msrest.serialization -from ._azure_machine_learning_workspaces_enums import * +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + import __init__ as _models + + +class DatastoreCredentials(msrest.serialization.Model): + """Base definition for datastore credentials. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, ServicePrincipalDatastoreCredentials. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + } + + _subtype_map = { + 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'KerberosKeytab': 'KerberosKeytabCredentials', 'KerberosPassword': 'KerberosPasswordCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = None # type: Optional[str] + + +class AccountKeyDatastoreCredentials(DatastoreCredentials): + """Account key datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: Required. [Required] Storage account secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + 'secrets': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, + } + + def __init__( + self, + *, + secrets: "_models.AccountKeyDatastoreSecrets", + **kwargs + ): + """ + :keyword secrets: Required. [Required] Storage account secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = 'AccountKey' # type: str + self.secrets = secrets + + +class DatastoreSecrets(msrest.serialization.Model): + """Base definition for datastore secrets. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". 
+ :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + } + + _subtype_map = { + 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'KerberosKeytab': 'KerberosKeytabSecrets', 'KerberosPassword': 'KerberosPasswordSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = None # type: Optional[str] + + +class AccountKeyDatastoreSecrets(DatastoreSecrets): + """Datastore account key secrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar key: Storage account key. + :vartype key: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + *, + key: Optional[str] = None, + **kwargs + ): + """ + :keyword key: Storage account key. + :paramtype key: str + """ + super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'AccountKey' # type: str + self.key = key + + +class AKSSchema(msrest.serialization.Model): + """AKSSchema. + + :ivar properties: AKS properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.AKSSchemaProperties"] = None, + **kwargs + ): + """ + :keyword properties: AKS properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + """ + super(AKSSchema, self).__init__(**kwargs) + self.properties = properties class Compute(msrest.serialization.Model): """Machine Learning compute object. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, VirtualMachine. + sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, Kubernetes, SynapseSpark, VirtualMachine. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar compute_type: Required. The type of compute.Constant filled by server. 
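The credential and secret models above are polymorphic msrest models: constructing a concrete subclass fills in the credentialsType/secretsType discriminators automatically, so callers never set them by hand. A minimal sketch using only the classes shown here, with a placeholder key:

from azure.mgmt.machinelearningservices.models import (
    AccountKeyDatastoreCredentials,
    AccountKeyDatastoreSecrets,
)

secrets = AccountKeyDatastoreSecrets(key="<storage-account-key>")
credentials = AccountKeyDatastoreCredentials(secrets=secrets)

# The subclass constructors set the discriminators; the caller never passes them in.
assert credentials.credentials_type == "AccountKey"
assert secrets.secrets_type == "AccountKey"

# msrest models expose serialize() for inspecting the wire shape.
print(credentials.serialize())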
Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'created_on': {'readonly': True}, 'modified_on': {'readonly': True}, @@ -69,25 +246,35 @@ class Compute(msrest.serialization.Model): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'VirtualMachine': 'VirtualMachine'} + 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'Kubernetes': 'Kubernetes', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'} } def __init__( self, *, - compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, **kwargs ): + """ + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ super(Compute, self).__init__(**kwargs) self.compute_type = None # type: Optional[str] - self.compute_location = compute_location + self.compute_location = None self.provisioning_state = None self.description = description self.created_on = None @@ -95,46 +282,50 @@ def __init__( self.resource_id = resource_id self.provisioning_errors = None self.is_attached_compute = None + self.disable_local_auth = disable_local_auth -class AKS(Compute): +class AKS(Compute, AKSSchema): """A Machine Learning compute based on AKS. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar properties: AKS properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". 
+ :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: AKS properties. - :type properties: ~azure.mgmt.machinelearningservices.models.AKSProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'created_on': {'readonly': True}, 'modified_on': {'readonly': True}, @@ -143,6 +334,7 @@ class AKS(Compute): } _attribute_map = { + 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'compute_location': {'key': 'computeLocation', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, @@ -150,23 +342,86 @@ class AKS(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AKSProperties'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.AKSSchemaProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["AKSProperties"] = None, + disable_local_auth: Optional[bool] = None, **kwargs ): - super(AKS, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'AKS' # type: str + """ + :keyword properties: AKS properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(AKS, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) self.properties = properties + self.compute_type = 'AKS' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class AksComputeSecretsProperties(msrest.serialization.Model): + """Properties of AksComputeSecrets. + + :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :vartype user_kube_config: str + :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :vartype admin_kube_config: str + :ivar image_pull_secret_name: Image registry pull secret. 
+ :vartype image_pull_secret_name: str + """ + + _attribute_map = { + 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, + 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, + 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + } + + def __init__( + self, + *, + user_kube_config: Optional[str] = None, + admin_kube_config: Optional[str] = None, + image_pull_secret_name: Optional[str] = None, + **kwargs + ): + """ + :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype user_kube_config: str + :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype admin_kube_config: str + :keyword image_pull_secret_name: Image registry pull secret. + :paramtype image_pull_secret_name: str + """ + super(AksComputeSecretsProperties, self).__init__(**kwargs) + self.user_kube_config = user_kube_config + self.admin_kube_config = admin_kube_config + self.image_pull_secret_name = image_pull_secret_name class ComputeSecrets(msrest.serialization.Model): @@ -177,10 +432,10 @@ class ComputeSecrets(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { @@ -199,27 +454,29 @@ def __init__( self, **kwargs ): + """ + """ super(ComputeSecrets, self).__init__(**kwargs) self.compute_type = None # type: Optional[str] -class AksComputeSecrets(ComputeSecrets): +class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): """Secrets related to a Machine Learning compute based on AKS. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param user_kube_config: Content of kubeconfig file that can be used to connect to the + :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. - :type user_kube_config: str - :param admin_kube_config: Content of kubeconfig file that can be used to connect to the + :vartype user_kube_config: str + :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. - :type admin_kube_config: str - :param image_pull_secret_name: Image registry pull secret. - :type image_pull_secret_name: str + :vartype admin_kube_config: str + :ivar image_pull_secret_name: Image registry pull secret. + :vartype image_pull_secret_name: str + :ivar compute_type: Required. The type of compute.Constant filled by server. 
Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { @@ -227,10 +484,10 @@ class AksComputeSecrets(ComputeSecrets): } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( @@ -241,27 +498,37 @@ def __init__( image_pull_secret_name: Optional[str] = None, **kwargs ): - super(AksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'AKS' # type: str + """ + :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype user_kube_config: str + :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :paramtype admin_kube_config: str + :keyword image_pull_secret_name: Image registry pull secret. + :paramtype image_pull_secret_name: str + """ + super(AksComputeSecrets, self).__init__(user_kube_config=user_kube_config, admin_kube_config=admin_kube_config, image_pull_secret_name=image_pull_secret_name, **kwargs) self.user_kube_config = user_kube_config self.admin_kube_config = admin_kube_config self.image_pull_secret_name = image_pull_secret_name + self.compute_type = 'AKS' # type: str class AksNetworkingConfiguration(msrest.serialization.Model): """Advance configuration for AKS networking. - :param subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :type subnet_id: str - :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must + :ivar subnet_id: Virtual network subnet resource ID the compute nodes belong to. + :vartype subnet_id: str + :ivar service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must not overlap with any Subnet IP ranges. - :type service_cidr: str - :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within + :vartype service_cidr: str + :ivar dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within the Kubernetes service address range specified in serviceCidr. - :type dns_service_ip: str - :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It + :vartype dns_service_ip: str + :ivar docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :type docker_bridge_cidr: str + :vartype docker_bridge_cidr: str """ _validation = { @@ -286,6 +553,19 @@ def __init__( docker_bridge_cidr: Optional[str] = None, **kwargs ): + """ + :keyword subnet_id: Virtual network subnet resource ID the compute nodes belong to. + :paramtype subnet_id: str + :keyword service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It + must not overlap with any Subnet IP ranges. + :paramtype service_cidr: str + :keyword dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be + within the Kubernetes service address range specified in serviceCidr. 
+ :paramtype dns_service_ip: str + :keyword docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It + must not overlap with any Subnet IP ranges or the Kubernetes service address range. + :paramtype docker_bridge_cidr: str + """ super(AksNetworkingConfiguration, self).__init__(**kwargs) self.subnet_id = subnet_id self.service_cidr = service_cidr @@ -293,38 +573,49 @@ def __init__( self.docker_bridge_cidr = docker_bridge_cidr -class AKSProperties(msrest.serialization.Model): +class AKSSchemaProperties(msrest.serialization.Model): """AKS properties. Variables are only populated by the server, and will be ignored when sending a request. - :param cluster_fqdn: Cluster full qualified domain name. - :type cluster_fqdn: str + :ivar cluster_fqdn: Cluster full qualified domain name. + :vartype cluster_fqdn: str :ivar system_services: System services. :vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService] - :param agent_count: Number of agents. - :type agent_count: int - :param agent_vm_size: Agent virtual machine size. - :type agent_vm_size: str - :param ssl_configuration: SSL configuration. - :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :param aks_networking_configuration: AKS networking configuration for vnet. - :type aks_networking_configuration: + :ivar agent_count: Number of agents. + :vartype agent_count: int + :ivar agent_vm_size: Agent virtual machine size. + :vartype agent_vm_size: str + :ivar cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", "DevTest". Default value: "FastProd". + :vartype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose + :ivar ssl_configuration: SSL configuration. + :vartype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration + :ivar aks_networking_configuration: AKS networking configuration for vnet. + :vartype aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :ivar load_balancer_type: Load Balancer Type. Known values are: "PublicIp", + "InternalLoadBalancer". Default value: "PublicIp". + :vartype load_balancer_type: str or ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :ivar load_balancer_subnet: Load Balancer Subnet. 
+ :vartype load_balancer_subnet: str """ _validation = { 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, + 'agent_count': {'minimum': 0}, } _attribute_map = { 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, + 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, + 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, + 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, } def __init__( @@ -333,57 +624,113 @@ def __init__( cluster_fqdn: Optional[str] = None, agent_count: Optional[int] = None, agent_vm_size: Optional[str] = None, - ssl_configuration: Optional["SslConfiguration"] = None, - aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None, + cluster_purpose: Optional[Union[str, "_models.ClusterPurpose"]] = "FastProd", + ssl_configuration: Optional["_models.SslConfiguration"] = None, + aks_networking_configuration: Optional["_models.AksNetworkingConfiguration"] = None, + load_balancer_type: Optional[Union[str, "_models.LoadBalancerType"]] = "PublicIp", + load_balancer_subnet: Optional[str] = None, **kwargs ): - super(AKSProperties, self).__init__(**kwargs) + """ + :keyword cluster_fqdn: Cluster full qualified domain name. + :paramtype cluster_fqdn: str + :keyword agent_count: Number of agents. + :paramtype agent_count: int + :keyword agent_vm_size: Agent virtual machine size. + :paramtype agent_vm_size: str + :keyword cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", "DevTest". Default value: "FastProd". + :paramtype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose + :keyword ssl_configuration: SSL configuration. + :paramtype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration + :keyword aks_networking_configuration: AKS networking configuration for vnet. + :paramtype aks_networking_configuration: + ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :keyword load_balancer_type: Load Balancer Type. Known values are: "PublicIp", + "InternalLoadBalancer". Default value: "PublicIp". + :paramtype load_balancer_type: str or + ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :keyword load_balancer_subnet: Load Balancer Subnet. + :paramtype load_balancer_subnet: str + """ + super(AKSSchemaProperties, self).__init__(**kwargs) self.cluster_fqdn = cluster_fqdn self.system_services = None self.agent_count = agent_count self.agent_vm_size = agent_vm_size + self.cluster_purpose = cluster_purpose self.ssl_configuration = ssl_configuration self.aks_networking_configuration = aks_networking_configuration + self.load_balancer_type = load_balancer_type + self.load_balancer_subnet = load_balancer_subnet + + +class AmlComputeSchema(msrest.serialization.Model): + """Properties(top level) of AmlCompute. + + :ivar properties: Properties of AmlCompute. 
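AKSSchemaProperties and AksNetworkingConfiguration above describe a vnet-injected cluster configuration; a minimal sketch with placeholder ids and CIDRs (the defaults documented above apply wherever a keyword is omitted):

from azure.mgmt.machinelearningservices.models import (
    AksNetworkingConfiguration,
    AKSSchemaProperties,
)

# Networking for a vnet-injected cluster; every value below is a placeholder.
networking = AksNetworkingConfiguration(
    subnet_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<resource-group>"
        "/providers/Microsoft.Network/virtualNetworks/<vnet-name>/subnets/<subnet-name>"
    ),
    service_cidr="10.0.0.0/16",
    dns_service_ip="10.0.0.10",
    docker_bridge_cidr="172.17.0.1/16",
)

aks_properties = AKSSchemaProperties(
    agent_count=3,
    agent_vm_size="Standard_D3_v2",
    cluster_purpose="DevTest",                  # default is "FastProd"
    load_balancer_type="InternalLoadBalancer",  # default is "PublicIp"
    aks_networking_configuration=networking,
)

The resulting object is what the AKS compute's properties field from the earlier sketch expects.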
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.AmlComputeProperties"] = None, + **kwargs + ): + """ + :keyword properties: Properties of AmlCompute. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + """ + super(AmlComputeSchema, self).__init__(**kwargs) + self.properties = properties -class AmlCompute(Compute): +class AmlCompute(Compute, AmlComputeSchema): """An Azure Machine Learning compute. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar properties: Properties of AmlCompute. + :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: AML Compute properties. 
- :type properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool """ _validation = { 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'created_on': {'readonly': True}, 'modified_on': {'readonly': True}, @@ -392,6 +739,7 @@ class AmlCompute(Compute): } _attribute_map = { + 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'compute_location': {'key': 'computeLocation', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, @@ -399,23 +747,43 @@ class AmlCompute(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.AmlComputeProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["AmlComputeProperties"] = None, + disable_local_auth: Optional[bool] = None, **kwargs ): - super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'AmlCompute' # type: str + """ + :keyword properties: Properties of AmlCompute. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(AmlCompute, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) self.properties = properties + self.compute_type = 'AmlCompute' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth class AmlComputeNodeInformation(msrest.serialization.Model): @@ -432,8 +800,8 @@ class AmlComputeNodeInformation(msrest.serialization.Model): :ivar port: SSH port number of the node. :vartype port: int :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". + leaving and preempted. Known values are: "idle", "running", "preparing", "unusable", "leaving", + "preempted". 
:vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState :ivar run_id: ID of the Experiment running on the node, if any else null. :vartype run_id: str @@ -461,6 +829,8 @@ def __init__( self, **kwargs ): + """ + """ super(AmlComputeNodeInformation, self).__init__(**kwargs) self.node_id = None self.private_ip_address = None @@ -470,83 +840,36 @@ def __init__( self.run_id = None -class ComputeNodesInformation(msrest.serialization.Model): - """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str - """ - - _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__( - self, - **kwargs - ): - super(ComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.next_link = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. +class AmlComputeNodesInformation(msrest.serialization.Model): + """Result of AmlCompute Nodes. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar next_link: The continuation token. - :vartype next_link: str :ivar nodes: The collection of returned AmlCompute nodes details. :vartype nodes: list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :ivar next_link: The continuation token. + :vartype next_link: str """ _validation = { - 'compute_type': {'required': True}, - 'next_link': {'readonly': True}, 'nodes': {'readonly': True}, + 'next_link': {'readonly': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): + """ + """ super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.compute_type = 'AmlCompute' # type: str self.nodes = None + self.next_link = None class AmlComputeProperties(msrest.serialization.Model): @@ -554,40 +877,45 @@ class AmlComputeProperties(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. 
- :param vm_size: Virtual Machine Size. - :type vm_size: str - :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :param scale_settings: Scale settings for AML Compute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :param user_account_credentials: Credentials for an administrator user account that will be + :ivar os_type: Compute OS Type. Known values are: "Linux", "Windows". Default value: "Linux". + :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType + :ivar vm_size: Virtual Machine Size. + :vartype vm_size: str + :ivar vm_priority: Virtual Machine priority. Known values are: "Dedicated", "LowPriority". + :vartype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :ivar virtual_machine_image: Virtual Machine image for AML Compute - windows only. + :vartype virtual_machine_image: ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :ivar isolated_network: Network is isolated or not. + :vartype isolated_network: bool + :ivar scale_settings: Scale settings for AML Compute. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :ivar user_account_credentials: Credentials for an administrator user account that will be created on each compute node. - :type user_account_credentials: + :vartype user_account_credentials: ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :param subnet: Virtual network subnet resource ID the compute nodes belong to. - :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param remote_login_port_public_access: State of the public SSH port. Possible values are: + :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. + :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :ivar remote_login_port_public_access: State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", "NotSpecified". Default value: "NotSpecified". - :type remote_login_port_public_access: str or + :vartype remote_login_port_public_access: str or ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :ivar allocation_state: Allocation state of the compute. Possible values are: steady - Indicates that the compute is not resizing. There are no changes to the number of compute nodes in the compute in progress. A compute enters this state when it is created and when no operations are being performed on the compute to change the number of compute nodes. resizing - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". + from the compute. Known values are: "Steady", "Resizing". 
:vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState :ivar allocation_state_transition_time: The time at which the compute entered its current allocation state. :vartype allocation_state_transition_time: ~datetime.datetime :ivar errors: Collection of errors encountered by various compute nodes during node setup. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar current_node_count: The number of compute nodes currently assigned to the compute. :vartype current_node_count: int :ivar target_node_count: The target number of compute nodes for the compute. If the @@ -597,6 +925,13 @@ class AmlComputeProperties(msrest.serialization.Model): :vartype target_node_count: int :ivar node_state_counts: Counts of various node states on the compute. :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts + :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible + values are: Possible values are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private endpoint and no + public IPs. + :vartype enable_node_public_ip: bool + :ivar property_bag: A property bag containing additional properties. + :vartype property_bag: any """ _validation = { @@ -609,34 +944,85 @@ class AmlComputeProperties(msrest.serialization.Model): } _attribute_map = { + 'os_type': {'key': 'osType', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, + 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, + 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, 'allocation_state': {'key': 'allocationState', 'type': 'str'}, 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, + 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, + 'property_bag': {'key': 'propertyBag', 'type': 'object'}, } def __init__( self, *, + os_type: Optional[Union[str, "_models.OsType"]] = "Linux", vm_size: Optional[str] = None, - vm_priority: Optional[Union[str, "VmPriority"]] = None, - scale_settings: Optional["ScaleSettings"] = None, - user_account_credentials: Optional["UserAccountCredentials"] = None, - subnet: Optional["ResourceId"] = None, - remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified", + vm_priority: Optional[Union[str, "_models.VmPriority"]] = None, + virtual_machine_image: Optional["_models.VirtualMachineImage"] = None, + isolated_network: Optional[bool] = None, + scale_settings: Optional["_models.ScaleSettings"] = None, + user_account_credentials: 
Optional["_models.UserAccountCredentials"] = None, + subnet: Optional["_models.ResourceId"] = None, + remote_login_port_public_access: Optional[Union[str, "_models.RemoteLoginPortPublicAccess"]] = "NotSpecified", + enable_node_public_ip: Optional[bool] = True, + property_bag: Optional[Any] = None, **kwargs ): + """ + :keyword os_type: Compute OS Type. Known values are: "Linux", "Windows". Default value: + "Linux". + :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType + :keyword vm_size: Virtual Machine Size. + :paramtype vm_size: str + :keyword vm_priority: Virtual Machine priority. Known values are: "Dedicated", "LowPriority". + :paramtype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :keyword virtual_machine_image: Virtual Machine image for AML Compute - windows only. + :paramtype virtual_machine_image: + ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :keyword isolated_network: Network is isolated or not. + :paramtype isolated_network: bool + :keyword scale_settings: Scale settings for AML Compute. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :keyword user_account_credentials: Credentials for an administrator user account that will be + created on each compute node. + :paramtype user_account_credentials: + ~azure.mgmt.machinelearningservices.models.UserAccountCredentials + :keyword subnet: Virtual network subnet resource ID the compute nodes belong to. + :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :keyword remote_login_port_public_access: State of the public SSH port. Possible values are: + Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - + Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, + else is open all public nodes. It can be default only during cluster creation time, after + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", + "NotSpecified". Default value: "NotSpecified". + :paramtype remote_login_port_public_access: str or + ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess + :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible + values are: Possible values are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private endpoint and no + public IPs. + :paramtype enable_node_public_ip: bool + :keyword property_bag: A property bag containing additional properties. + :paramtype property_bag: any + """ super(AmlComputeProperties, self).__init__(**kwargs) + self.os_type = os_type self.vm_size = vm_size self.vm_priority = vm_priority + self.virtual_machine_image = virtual_machine_image + self.isolated_network = isolated_network self.scale_settings = scale_settings self.user_account_credentials = user_account_credentials self.subnet = subnet @@ -647,2557 +1033,21177 @@ def __init__( self.current_node_count = None self.target_node_count = None self.node_state_counts = None + self.enable_node_public_ip = enable_node_public_ip + self.property_bag = property_bag -class AmlUserFeature(msrest.serialization.Model): - """Features enabled for a workspace. +class AmlOperation(msrest.serialization.Model): + """Azure Machine Learning workspace REST API operation. - :param id: Specifies the feature ID. 
- :type id: str - :param display_name: Specifies the feature name. - :type display_name: str - :param description: Describes the feature for user experience. - :type description: str + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar display: Display name of operation. + :vartype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :ivar is_data_action: Indicates whether the operation applies to data-plane. + :vartype is_data_action: bool """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'AmlOperationDisplay'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( self, *, - id: Optional[str] = None, - display_name: Optional[str] = None, - description: Optional[str] = None, + name: Optional[str] = None, + display: Optional["_models.AmlOperationDisplay"] = None, + is_data_action: Optional[bool] = None, **kwargs ): - super(AmlUserFeature, self).__init__(**kwargs) - self.id = id - self.display_name = display_name - self.description = description + """ + :keyword name: Operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword display: Display name of operation. + :paramtype display: ~azure.mgmt.machinelearningservices.models.AmlOperationDisplay + :keyword is_data_action: Indicates whether the operation applies to data-plane. + :paramtype is_data_action: bool + """ + super(AmlOperation, self).__init__(**kwargs) + self.name = name + self.display = display + self.is_data_action = is_data_action -class ClusterUpdateParameters(msrest.serialization.Model): - """AmlCompute update parameters. +class AmlOperationDisplay(msrest.serialization.Model): + """Display name of operation. - :param scale_settings: Desired scale settings for the amlCompute. - :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + :ivar provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :vartype provider: str + :ivar resource: The resource on which the operation is performed. + :vartype resource: str + :ivar operation: The operation that users can perform. + :vartype operation: str + :ivar description: The description for the operation. + :vartype description: str """ _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, } def __init__( self, *, - scale_settings: Optional["ScaleSettings"] = None, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = scale_settings - + """ + :keyword provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :paramtype provider: str + :keyword resource: The resource on which the operation is performed. + :paramtype resource: str + :keyword operation: The operation that users can perform. + :paramtype operation: str + :keyword description: The description for the operation. 
+ :paramtype description: str + """ + super(AmlOperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description -class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): - """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. - Variables are only populated by the server, and will be ignored when sending a request. +class AmlOperationListResult(msrest.serialization.Model): + """An array of operations supported by the resource provider. - :ivar principal_id: The principal id of user assigned identity. - :vartype principal_id: str - :ivar client_id: The client id of user assigned identity. - :vartype client_id: str + :ivar value: List of AML workspace operations supported by the AML workspace resource provider. + :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] """ - _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, - } - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[AmlOperation]'}, } def __init__( self, + *, + value: Optional[List["_models.AmlOperation"]] = None, + **kwargs + ): + """ + :keyword value: List of AML workspace operations supported by the AML workspace resource + provider. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] + """ + super(AmlOperationListResult, self).__init__(**kwargs) + self.value = value + + +class IdentityConfiguration(msrest.serialization.Model): + """Base definition for identity configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmlToken, ManagedIdentity, UserIdentity. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant + filled by server. Known values are: "Managed", "AMLToken", "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + } + + _subtype_map = { + 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity', 'UserIdentity': 'UserIdentity'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(IdentityConfiguration, self).__init__(**kwargs) + self.identity_type = None # type: Optional[str] + + +class AmlToken(IdentityConfiguration): + """AML Token identity configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant + filled by server. Known values are: "Managed", "AMLToken", "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AmlToken, self).__init__(**kwargs) + self.identity_type = 'AMLToken' # type: str + + +class AmlUserFeature(msrest.serialization.Model): + """Features enabled for a workspace. 
+ + :ivar id: Specifies the feature ID. + :vartype id: str + :ivar display_name: Specifies the feature name. + :vartype display_name: str + :ivar description: Describes the feature for user experience. + :vartype description: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + display_name: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword id: Specifies the feature ID. + :paramtype id: str + :keyword display_name: Specifies the feature name. + :paramtype display_name: str + :keyword description: Describes the feature for user experience. + :paramtype description: str + """ + super(AmlUserFeature, self).__init__(**kwargs) + self.id = id + self.display_name = display_name + self.description = description + + +class ResourceBase(msrest.serialization.Model): + """ResourceBase. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + """ + super(ResourceBase, self).__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags + + +class AssetBase(ResourceBase): + """AssetBase. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
+ :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super(AssetBase, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.is_anonymous = is_anonymous + self.is_archived = is_archived + + +class AssetContainer(ResourceBase): + """AssetContainer. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super(AssetContainer, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.is_archived = is_archived + self.latest_version = None + self.next_version = None + + +class AssetJobInput(msrest.serialization.Model): + """Asset input type. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + """ + + _validation = { + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__( + self, + *, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs + ): + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. 
+ :paramtype uri: str + """ + super(AssetJobInput, self).__init__(**kwargs) + self.mode = mode + self.uri = uri + + +class AssetJobOutput(msrest.serialization.Model): + """Asset output type. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + """ + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + """ + super(AssetJobOutput, self).__init__(**kwargs) + self.mode = mode + self.uri = uri + + +class AssetReferenceBase(msrest.serialization.Model): + """Base definition for asset references. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant + filled by server. Known values are: "Id", "DataPath", "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + _subtype_map = { + 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AssetReferenceBase, self).__init__(**kwargs) + self.reference_type = None # type: Optional[str] + + +class AssignedUser(msrest.serialization.Model): + """A user that can be assigned to a compute instance. + + All required parameters must be populated in order to send to Azure. + + :ivar object_id: Required. User’s AAD Object Id. + :vartype object_id: str + :ivar tenant_id: Required. User’s AAD Tenant Id. + :vartype tenant_id: str + """ + + _validation = { + 'object_id': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + object_id: str, + tenant_id: str, + **kwargs + ): + """ + :keyword object_id: Required. User’s AAD Object Id. + :paramtype object_id: str + :keyword tenant_id: Required. User’s AAD Tenant Id. + :paramtype tenant_id: str + """ + super(AssignedUser, self).__init__(**kwargs) + self.object_id = object_id + self.tenant_id = tenant_id + + +class ForecastHorizon(msrest.serialization.Model): + """The desired maximum forecast horizon in units of time-series frequency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoForecastHorizon, CustomForecastHorizon. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by + server. Known values are: "Auto", "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + _subtype_map = { + 'mode': {'Auto': 'AutoForecastHorizon', 'Custom': 'CustomForecastHorizon'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ForecastHorizon, self).__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoForecastHorizon(ForecastHorizon): + """Forecast horizon determined automatically by system. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by + server. Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AutoForecastHorizon, self).__init__(**kwargs) + self.mode = 'Auto' # type: str + + +class JobBaseProperties(ResourceBase): + """Base definition for a job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". 
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + """ + + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + _subtype_map = { + 'job_type': {'AutoML': 'AutoMLJob', 'Command': 'CommandJob', 'Labeling': 'LabelingJobProperties', 'Pipeline': 'PipelineJob', 'Spark': 'SparkJob', 'Sweep': 'SweepJob'} + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + """ + super(JobBaseProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.component_id = component_id + self.compute_id = compute_id + self.display_name = display_name + self.experiment_name = experiment_name + self.identity = identity + self.is_archived = is_archived + self.job_type = 'JobBaseProperties' # type: str + self.services = services + self.status = None + + +class AutoMLJob(JobBaseProperties): + """AutoMLJob class. +Use this class for executing AutoML tasks like Classification/Regression etc. +See TaskType enum for all the tasks supported. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + This is optional value to provide, if not provided, AutoML will default this to Production + AutoML curated environment version when running the job. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + :ivar task_details: Required. [Required] This represents scenario which can be one of + Tables/NLP/Image. 
+ :vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical + """ + + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + 'task_details': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, + 'task_details': {'key': 'taskDetails', 'type': 'AutoMLVertical'}, + } + + def __init__( + self, + *, + task_details: "_models.AutoMLVertical", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword environment_id: The ARM resource ID of the Environment specification for the job. + This is optional value to provide, if not provided, AutoML will default this to Production + AutoML curated environment version when running the job. 
+ :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + :keyword task_details: Required. [Required] This represents scenario which can be one of + Tables/NLP/Image. + :paramtype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical + """ + super(AutoMLJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'AutoML' # type: str + self.environment_id = environment_id + self.environment_variables = environment_variables + self.outputs = outputs + self.resources = resources + self.task_details = task_details + + +class AutoMLVertical(msrest.serialization.Model): + """AutoML vertical class. +Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, TextClassificationMultilabel, TextNer. + + All required parameters must be populated in order to send to Azure. + + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. 
+ :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + + _validation = { + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + } + + _subtype_map = { + 'task_type': {'Classification': 'Classification', 'Forecasting': 'Forecasting', 'ImageClassification': 'ImageClassification', 'ImageClassificationMultilabel': 'ImageClassificationMultilabel', 'ImageInstanceSegmentation': 'ImageInstanceSegmentation', 'ImageObjectDetection': 'ImageObjectDetection', 'Regression': 'Regression', 'TextClassification': 'TextClassification', 'TextClassificationMultilabel': 'TextClassificationMultilabel', 'TextNER': 'TextNer'} + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + **kwargs + ): + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super(AutoMLVertical, self).__init__(**kwargs) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type = None # type: Optional[str] + self.training_data = training_data + + +class NCrossValidations(msrest.serialization.Model): + """N-Cross validations value. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoNCrossValidations, CustomNCrossValidations. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by + server. Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + _subtype_map = { + 'mode': {'Auto': 'AutoNCrossValidations', 'Custom': 'CustomNCrossValidations'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(NCrossValidations, self).__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoNCrossValidations(NCrossValidations): + """N-Cross validations determined automatically. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by + server. Known values are: "Auto", "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AutoNCrossValidations, self).__init__(**kwargs) + self.mode = 'Auto' # type: str + + +class AutoPauseProperties(msrest.serialization.Model): + """Auto pause properties. + + :ivar delay_in_minutes: + :vartype delay_in_minutes: int + :ivar enabled: + :vartype enabled: bool + """ + + _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + delay_in_minutes: Optional[int] = None, + enabled: Optional[bool] = None, + **kwargs + ): + """ + :keyword delay_in_minutes: + :paramtype delay_in_minutes: int + :keyword enabled: + :paramtype enabled: bool + """ + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = delay_in_minutes + self.enabled = enabled + + +class AutoScaleProperties(msrest.serialization.Model): + """Auto scale properties. + + :ivar min_node_count: + :vartype min_node_count: int + :ivar enabled: + :vartype enabled: bool + :ivar max_node_count: + :vartype max_node_count: int + """ + + _attribute_map = { + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + } + + def __init__( + self, + *, + min_node_count: Optional[int] = None, + enabled: Optional[bool] = None, + max_node_count: Optional[int] = None, + **kwargs + ): + """ + :keyword min_node_count: + :paramtype min_node_count: int + :keyword enabled: + :paramtype enabled: bool + :keyword max_node_count: + :paramtype max_node_count: int + """ + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = min_node_count + self.enabled = enabled + self.max_node_count = max_node_count + + +class Seasonality(msrest.serialization.Model): + """Forecasting seasonality. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoSeasonality, CustomSeasonality. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Known values are: + "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + _subtype_map = { + 'mode': {'Auto': 'AutoSeasonality', 'Custom': 'CustomSeasonality'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Seasonality, self).__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoSeasonality(Seasonality): + """AutoSeasonality. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Known values are: + "Auto", "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AutoSeasonality, self).__init__(**kwargs) + self.mode = 'Auto' # type: str + + +class TargetLags(msrest.serialization.Model): + """The number of past periods to lag from the target column. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoTargetLags, CustomTargetLags. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. + Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + _subtype_map = { + 'mode': {'Auto': 'AutoTargetLags', 'Custom': 'CustomTargetLags'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(TargetLags, self).__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoTargetLags(TargetLags): + """AutoTargetLags. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. + Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AutoTargetLags, self).__init__(**kwargs) + self.mode = 'Auto' # type: str + + +class TargetRollingWindowSize(msrest.serialization.Model): + """Forecasting target rolling window size. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoTargetRollingWindowSize, CustomTargetRollingWindowSize. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by + server. Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + _subtype_map = { + 'mode': {'Auto': 'AutoTargetRollingWindowSize', 'Custom': 'CustomTargetRollingWindowSize'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(TargetRollingWindowSize, self).__init__(**kwargs) + self.mode = None # type: Optional[str] + + +class AutoTargetRollingWindowSize(TargetRollingWindowSize): + """Target lags rolling window determined automatically. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by + server. Known values are: "Auto", "Custom". 
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode + """ + + _validation = { + 'mode': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(AutoTargetRollingWindowSize, self).__init__(**kwargs) + self.mode = 'Auto' # type: str + + +class AzureDatastore(msrest.serialization.Model): + """Base definition for Azure datastore contents configuration. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + """ + + _attribute_map = { + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + **kwargs + ): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + """ + super(AzureDatastore, self).__init__(**kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class DatastoreProperties(ResourceBase): + """Base definition for datastore contents configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore, HdfsDatastore. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. 
+ :vartype is_default: bool + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + } + + _subtype_map = { + 'datastore_type': {'AzureBlob': 'AzureBlobDatastore', 'AzureDataLakeGen1': 'AzureDataLakeGen1Datastore', 'AzureDataLakeGen2': 'AzureDataLakeGen2Datastore', 'AzureFile': 'AzureFileDatastore', 'Hdfs': 'HdfsDatastore'} + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + """ + super(DatastoreProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.credentials = credentials + self.datastore_type = 'DatastoreProperties' # type: str + self.is_default = None + + +class AzureBlobDatastore(DatastoreProperties, AzureDatastore): + """Azure Blob datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar account_name: Storage account name. + :vartype account_name: str + :ivar container_name: Storage account container name. + :vartype container_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar protocol: Protocol used to communicate with the storage account. + :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. 
Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + } + + _attribute_map = { + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, + 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + account_name: Optional[str] = None, + container_name: Optional[str] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword account_name: Storage account name. + :paramtype account_name: str + :keyword container_name: Storage account container name. + :paramtype container_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
+ :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super(AzureBlobDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, resource_group=resource_group, subscription_id=subscription_id, **kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + self.datastore_type = 'AzureBlob' # type: str + self.account_name = account_name + self.container_name = container_name + self.endpoint = endpoint + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.is_default = None + + +class AzureDataLakeGen1Datastore(DatastoreProperties, AzureDatastore): + """Azure Data Lake Gen1 datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + :ivar store_name: Required. [Required] Azure Data Lake store name. 
+ :vartype store_name: str + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + 'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + 'store_name': {'key': 'storeName', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + store_name: str, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + :keyword store_name: Required. [Required] Azure Data Lake store name. + :paramtype store_name: str + """ + super(AzureDataLakeGen1Datastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, resource_group=resource_group, subscription_id=subscription_id, **kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + self.datastore_type = 'AzureDataLakeGen1' # type: str + self.service_data_access_auth_identity = service_data_access_auth_identity + self.store_name = store_name + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.is_default = None + + +class AzureDataLakeGen2Datastore(DatastoreProperties, AzureDatastore): + """Azure Data Lake Gen2 datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar description: The asset description text. 
+ :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar account_name: Required. [Required] Storage account name. + :vartype account_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. + :vartype filesystem: str + :ivar protocol: Protocol used to communicate with the storage account. + :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'filesystem': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'filesystem': {'key': 'filesystem', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, + 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + account_name: str, + filesystem: str, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. 
Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword account_name: Required. [Required] Storage account name. + :paramtype account_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. + :paramtype filesystem: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super(AzureDataLakeGen2Datastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, resource_group=resource_group, subscription_id=subscription_id, **kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + self.datastore_type = 'AzureDataLakeGen2' # type: str + self.account_name = account_name + self.endpoint = endpoint + self.filesystem = filesystem + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.is_default = None + + +class AzureFileDatastore(DatastoreProperties, AzureDatastore): + """Azure File datastore configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar account_name: Required. [Required] Storage account name. + :vartype account_name: str + :ivar endpoint: Azure cloud endpoint for the storage account. + :vartype endpoint: str + :ivar file_share_name: Required. [Required] The name of the Azure file share that the datastore + points to. + :vartype file_share_name: str + :ivar protocol: Protocol used to communicate with the storage account. + :vartype protocol: str + :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. 
Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + :vartype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'file_share_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'file_share_name': {'key': 'fileShareName', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, + 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + account_name: str, + file_share_name: str, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + endpoint: Optional[str] = None, + protocol: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs + ): + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword account_name: Required. [Required] Storage account name. + :paramtype account_name: str + :keyword endpoint: Azure cloud endpoint for the storage account. + :paramtype endpoint: str + :keyword file_share_name: Required. [Required] The name of the Azure file share that the + datastore points to. + :paramtype file_share_name: str + :keyword protocol: Protocol used to communicate with the storage account. + :paramtype protocol: str + :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
+ :paramtype service_data_access_auth_identity: str or + ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity + """ + super(AzureFileDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, resource_group=resource_group, subscription_id=subscription_id, **kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + self.datastore_type = 'AzureFile' # type: str + self.account_name = account_name + self.endpoint = endpoint + self.file_share_name = file_share_name + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.description = description + self.properties = properties + self.tags = tags + self.credentials = credentials + self.is_default = None + + +class EarlyTerminationPolicy(msrest.serialization.Model): + """Early termination policies enable canceling poor-performing runs before they complete. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy. + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. + Known values are: "Bandit", "MedianStopping", "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + """ + + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + } + + _subtype_map = { + 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'} + } + + def __init__( + self, + *, + delay_evaluation: Optional[int] = 0, + evaluation_interval: Optional[int] = 0, + **kwargs + ): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + """ + super(EarlyTerminationPolicy, self).__init__(**kwargs) + self.delay_evaluation = delay_evaluation + self.evaluation_interval = evaluation_interval + self.policy_type = None # type: Optional[str] + + +class BanditPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation. + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. + Known values are: "Bandit", "MedianStopping", "TruncationSelection". 
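
# A minimal usage sketch for the datastore models generated above. Only the
# AzureBlobDatastore keyword arguments come from the constructor in this file;
# the AccountKey credential classes are the assumed "AccountKey" subtype of
# DatastoreCredentials from this models module, and every literal value is a
# placeholder.
from azure.mgmt.machinelearningservices.models import (
    AccountKeyDatastoreCredentials,  # assumed AccountKey credential subtype
    AccountKeyDatastoreSecrets,
    AzureBlobDatastore,
)

blob_datastore_properties = AzureBlobDatastore(
    credentials=AccountKeyDatastoreCredentials(
        secrets=AccountKeyDatastoreSecrets(key="<storage-account-key>"),  # placeholder secret
    ),
    account_name="examplestorageacct",   # placeholder storage account
    container_name="training-data",      # placeholder container
    description="Blob container holding training data.",
    tags={"team": "ml-platform"},
    service_data_access_auth_identity="WorkspaceSystemAssignedIdentity",
)
# The discriminator datastore_type is set to 'AzureBlob' by the constructor, and
# is_default stays None because it is populated only by the server.
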
+ :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + :ivar slack_amount: Absolute distance allowed from the best performing run. + :vartype slack_amount: float + :ivar slack_factor: Ratio of the allowed distance from the best performing run. + :vartype slack_factor: float + """ + + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, + 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, + } + + def __init__( + self, + *, + delay_evaluation: Optional[int] = 0, + evaluation_interval: Optional[int] = 0, + slack_amount: Optional[float] = 0, + slack_factor: Optional[float] = 0, + **kwargs + ): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + :keyword slack_amount: Absolute distance allowed from the best performing run. + :paramtype slack_amount: float + :keyword slack_factor: Ratio of the allowed distance from the best performing run. + :paramtype slack_factor: float + """ + super(BanditPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = 'Bandit' # type: str + self.slack_amount = slack_amount + self.slack_factor = slack_factor + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: Required. The geo-location where the resource lives. + :vartype location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: Required. The geo-location where the resource lives. + :paramtype location: str + """ + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location + + +class BatchDeployment(TrackedResource): + """BatchDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: Required. The geo-location where the resource lives. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchDeploymentProperties'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.BatchDeploymentProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: Required. The geo-location where the resource lives. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super(BatchDeployment, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class EndpointDeploymentPropertiesBase(msrest.serialization.Model): + """Base definition for endpoint deployment. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. 
+ :vartype properties: dict[str, str] + """ + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super(EndpointDeploymentPropertiesBase, self).__init__(**kwargs) + self.code_configuration = code_configuration + self.description = description + self.environment_id = environment_id + self.environment_variables = environment_variables + self.properties = properties + + +class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): + """Batch inference settings per deployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar compute: Compute target for batch inference operation. + :vartype compute: str + :ivar error_threshold: Error threshold, if the error count for the entire input goes above this + value, + the batch inference will be aborted. Range is [-1, int.MaxValue]. + For FileDataset, this value is the count of file failures. + For TabularDataset, this value is the count of record failures. + If set to -1 (the lower bound), all failures during batch inference will be ignored. + :vartype error_threshold: int + :ivar logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", "Debug". + :vartype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :ivar max_concurrency_per_instance: Indicates maximum number of parallelism per instance. + :vartype max_concurrency_per_instance: int + :ivar mini_batch_size: Size of the mini-batch passed to each batch invocation. + For FileDataset, this is the number of files per mini-batch. 
+ For TabularDataset, this is the size of the records in bytes, per mini-batch. + :vartype mini_batch_size: long + :ivar model: Reference to the model asset for the endpoint deployment. + :vartype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :ivar output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly", "AppendRow". + :vartype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction + :ivar output_file_name: Customized output file name for append_row output action. + :vartype output_file_name: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar resources: Indicates compute configuration for the job. + If not provided, will default to the defaults defined in ResourceConfiguration. + :vartype resources: ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration + :ivar retry_settings: Retry Settings for the batch inference operation. + If not provided, will default to the defaults defined in BatchRetrySettings. + :vartype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'compute': {'key': 'compute', 'type': 'str'}, + 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, + 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, + 'max_concurrency_per_instance': {'key': 'maxConcurrencyPerInstance', 'type': 'int'}, + 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'output_action': {'key': 'outputAction', 'type': 'str'}, + 'output_file_name': {'key': 'outputFileName', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'resources': {'key': 'resources', 'type': 'DeploymentResourceConfiguration'}, + 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + compute: Optional[str] = None, + error_threshold: Optional[int] = -1, + logging_level: Optional[Union[str, "_models.BatchLoggingLevel"]] = None, + max_concurrency_per_instance: Optional[int] = 1, + mini_batch_size: Optional[int] = 10, + model: Optional["_models.AssetReferenceBase"] = None, + output_action: Optional[Union[str, "_models.BatchOutputAction"]] = None, + output_file_name: Optional[str] = "predictions.csv", + resources: Optional["_models.DeploymentResourceConfiguration"] = None, + retry_settings: Optional["_models.BatchRetrySettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. 
+ :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword compute: Compute target for batch inference operation. + :paramtype compute: str + :keyword error_threshold: Error threshold, if the error count for the entire input goes above + this value, + the batch inference will be aborted. Range is [-1, int.MaxValue]. + For FileDataset, this value is the count of file failures. + For TabularDataset, this value is the count of record failures. + If set to -1 (the lower bound), all failures during batch inference will be ignored. + :paramtype error_threshold: int + :keyword logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", "Debug". + :paramtype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :keyword max_concurrency_per_instance: Indicates maximum number of parallelism per instance. + :paramtype max_concurrency_per_instance: int + :keyword mini_batch_size: Size of the mini-batch passed to each batch invocation. + For FileDataset, this is the number of files per mini-batch. + For TabularDataset, this is the size of the records in bytes, per mini-batch. + :paramtype mini_batch_size: long + :keyword model: Reference to the model asset for the endpoint deployment. + :paramtype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :keyword output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly", "AppendRow". + :paramtype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction + :keyword output_file_name: Customized output file name for append_row output action. + :paramtype output_file_name: str + :keyword resources: Indicates compute configuration for the job. + If not provided, will default to the defaults defined in ResourceConfiguration. + :paramtype resources: + ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration + :keyword retry_settings: Retry Settings for the batch inference operation. + If not provided, will default to the defaults defined in BatchRetrySettings. + :paramtype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ + super(BatchDeploymentProperties, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, **kwargs) + self.compute = compute + self.error_threshold = error_threshold + self.logging_level = logging_level + self.max_concurrency_per_instance = max_concurrency_per_instance + self.mini_batch_size = mini_batch_size + self.model = model + self.output_action = output_action + self.output_file_name = output_file_name + self.provisioning_state = None + self.resources = resources + self.retry_settings = retry_settings + + +class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of BatchDeployment entities. 
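
# A minimal sketch composing the BatchDeployment models generated above. It uses
# only the constructors shown in this file; the region, the compute resource ID
# and the tag values are placeholders.
import datetime

from azure.mgmt.machinelearningservices.models import (
    BatchDeployment,
    BatchDeploymentProperties,
    BatchRetrySettings,
)

deployment = BatchDeployment(
    location="westeurope",  # placeholder region
    properties=BatchDeploymentProperties(
        compute="<arm-id-of-compute-target>",   # placeholder compute target
        error_threshold=-1,                     # -1: ignore all per-item failures
        max_concurrency_per_instance=2,
        mini_batch_size=10,
        output_action="AppendRow",              # known values: "SummaryOnly", "AppendRow"
        output_file_name="predictions.csv",
        retry_settings=BatchRetrySettings(
            max_retries=3,
            timeout=datetime.timedelta(minutes=5),  # serialized as an ISO 8601 duration
        ),
    ),
    tags={"stage": "dev"},
)
# provisioning_state is read-only and remains None until returned by the service.
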
+ + :ivar next_link: The link to the next page of BatchDeployment objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type BatchDeployment. + :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BatchDeployment]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.BatchDeployment"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of BatchDeployment objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type BatchDeployment. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] + """ + super(BatchDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class BatchEndpoint(TrackedResource): + """BatchEndpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: Required. The geo-location where the resource lives. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchEndpointProperties'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.BatchEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: Required. The geo-location where the resource lives. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super(BatchEndpoint, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class BatchEndpointDefaults(msrest.serialization.Model): + """Batch endpoint default values. + + :ivar deployment_name: Name of the deployment that will be default for the endpoint. + This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. + :vartype deployment_name: str + """ + + _attribute_map = { + 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, + } + + def __init__( + self, + *, + deployment_name: Optional[str] = None, + **kwargs + ): + """ + :keyword deployment_name: Name of the deployment that will be default for the endpoint. + This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. + :paramtype deployment_name: str + """ + super(BatchEndpointDefaults, self).__init__(**kwargs) + self.deployment_name = deployment_name + + +class EndpointPropertiesBase(msrest.serialization.Model): + """Inference Endpoint base definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for + Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Known values are: "AMLToken", "Key", "AADToken". 
+ :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + """ + + _validation = { + 'auth_mode': {'required': True}, + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + } + + _attribute_map = { + 'auth_mode': {'key': 'authMode', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' + for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' + does. Known values are: "AMLToken", "Key", "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + """ + super(EndpointPropertiesBase, self).__init__(**kwargs) + self.auth_mode = auth_mode + self.description = description + self.keys = keys + self.properties = properties + self.scoring_uri = None + self.swagger_uri = None + + +class BatchEndpointProperties(EndpointPropertiesBase): + """Batch endpoint configuration. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for + Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Known values are: "AMLToken", "Key", "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. 
+ :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :ivar defaults: Default values for Batch Endpoint. + :vartype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + """ + + _validation = { + 'auth_mode': {'required': True}, + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'auth_mode': {'key': 'authMode', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'defaults': {'key': 'defaults', 'type': 'BatchEndpointDefaults'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + defaults: Optional["_models.BatchEndpointDefaults"] = None, + **kwargs + ): + """ + :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' + for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' + does. Known values are: "AMLToken", "Key", "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword defaults: Default values for Batch Endpoint. + :paramtype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults + """ + super(BatchEndpointProperties, self).__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + self.defaults = defaults + self.provisioning_state = None + + +class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of BatchEndpoint entities. + + :ivar next_link: The link to the next page of BatchEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type BatchEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[BatchEndpoint]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.BatchEndpoint"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of BatchEndpoint objects. If null, there are no + additional pages. 
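
# A minimal sketch composing the BatchEndpoint models generated above. The
# region, description and default deployment name are placeholders; auth_mode
# uses one of the known values documented in this file.
from azure.mgmt.machinelearningservices.models import (
    BatchEndpoint,
    BatchEndpointDefaults,
    BatchEndpointProperties,
)

endpoint = BatchEndpoint(
    location="westeurope",  # placeholder region
    properties=BatchEndpointProperties(
        auth_mode="Key",  # known values: "AMLToken", "Key", "AADToken"
        description="Scores nightly batches of telemetry.",
        defaults=BatchEndpointDefaults(deployment_name="nightly-scoring"),
    ),
)
# scoring_uri, swagger_uri and provisioning_state are read-only and remain None
# until the service populates them.
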
+ :paramtype next_link: str + :keyword value: An array of objects of type BatchEndpoint. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + """ + super(BatchEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class BatchRetrySettings(msrest.serialization.Model): + """Retry settings for a batch inference operation. + + :ivar max_retries: Maximum retry count for a mini-batch. + :vartype max_retries: int + :ivar timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + 'max_retries': {'key': 'maxRetries', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + max_retries: Optional[int] = 3, + timeout: Optional[datetime.timedelta] = "PT30S", + **kwargs + ): + """ + :keyword max_retries: Maximum retry count for a mini-batch. + :paramtype max_retries: int + :keyword timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :paramtype timeout: ~datetime.timedelta + """ + super(BatchRetrySettings, self).__init__(**kwargs) + self.max_retries = max_retries + self.timeout = timeout + + +class SamplingAlgorithm(msrest.serialization.Model): + """The Sampling Algorithm used to generate hyperparameter values, along with properties to +configure the algorithm. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm. + + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating + hyperparameter values, along with configuration properties.Constant filled by server. Known + values are: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + 'sampling_algorithm_type': {'required': True}, + } + + _attribute_map = { + 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + } + + _subtype_map = { + 'sampling_algorithm_type': {'Bayesian': 'BayesianSamplingAlgorithm', 'Grid': 'GridSamplingAlgorithm', 'Random': 'RandomSamplingAlgorithm'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(SamplingAlgorithm, self).__init__(**kwargs) + self.sampling_algorithm_type = None # type: Optional[str] + + +class BayesianSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that generates values based on previous values. + + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating + hyperparameter values, along with configuration properties.Constant filled by server. Known + values are: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + 'sampling_algorithm_type': {'required': True}, + } + + _attribute_map = { + 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(BayesianSamplingAlgorithm, self).__init__(**kwargs) + self.sampling_algorithm_type = 'Bayesian' # type: str + + +class BindOptions(msrest.serialization.Model): + """BindOptions. + + :ivar propagation: Type of Bind Option. 
+ :vartype propagation: str + :ivar create_host_path: Indicate whether to create host path. + :vartype create_host_path: bool + :ivar selinux: Mention the selinux options. + :vartype selinux: str + """ + + _attribute_map = { + 'propagation': {'key': 'propagation', 'type': 'str'}, + 'create_host_path': {'key': 'createHostPath', 'type': 'bool'}, + 'selinux': {'key': 'selinux', 'type': 'str'}, + } + + def __init__( + self, + *, + propagation: Optional[str] = None, + create_host_path: Optional[bool] = None, + selinux: Optional[str] = None, + **kwargs + ): + """ + :keyword propagation: Type of Bind Option. + :paramtype propagation: str + :keyword create_host_path: Indicate whether to create host path. + :paramtype create_host_path: bool + :keyword selinux: Mention the selinux options. + :paramtype selinux: str + """ + super(BindOptions, self).__init__(**kwargs) + self.propagation = propagation + self.create_host_path = create_host_path + self.selinux = selinux + + +class BuildContext(msrest.serialization.Model): + """Configuration settings for Docker build context. + + All required parameters must be populated in order to send to Azure. + + :ivar context_uri: Required. [Required] URI of the Docker build context used to build the + image. Supports blob URIs on environment creation and may return blob or Git URIs. + + + .. raw:: html + + . + :vartype context_uri: str + :ivar dockerfile_path: Path to the Dockerfile in the build context. + + + .. raw:: html + + . + :vartype dockerfile_path: str + """ + + _validation = { + 'context_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'context_uri': {'key': 'contextUri', 'type': 'str'}, + 'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'}, + } + + def __init__( + self, + *, + context_uri: str, + dockerfile_path: Optional[str] = "Dockerfile", + **kwargs + ): + """ + :keyword context_uri: Required. [Required] URI of the Docker build context used to build the + image. Supports blob URIs on environment creation and may return blob or Git URIs. + + + .. raw:: html + + . + :paramtype context_uri: str + :keyword dockerfile_path: Path to the Dockerfile in the build context. + + + .. raw:: html + + . + :paramtype dockerfile_path: str + """ + super(BuildContext, self).__init__(**kwargs) + self.context_uri = context_uri + self.dockerfile_path = dockerfile_path + + +class CertificateDatastoreCredentials(DatastoreCredentials): + """Certificate datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar authority_url: Authority URL used for authentication. + :vartype authority_url: str + :ivar client_id: Required. [Required] Service principal client ID. + :vartype client_id: str + :ivar resource_url: Resource the service principal has access to. + :vartype resource_url: str + :ivar secrets: Required. [Required] Service principal secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. + :vartype tenant_id: str + :ivar thumbprint: Required. 
[Required] Thumbprint of the certificate used for authentication. + :vartype thumbprint: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'secrets': {'required': True}, + 'tenant_id': {'required': True}, + 'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: str, + secrets: "_models.CertificateDatastoreSecrets", + tenant_id: str, + thumbprint: str, + authority_url: Optional[str] = None, + resource_url: Optional[str] = None, + **kwargs + ): + """ + :keyword authority_url: Authority URL used for authentication. + :paramtype authority_url: str + :keyword client_id: Required. [Required] Service principal client ID. + :paramtype client_id: str + :keyword resource_url: Resource the service principal has access to. + :paramtype resource_url: str + :keyword secrets: Required. [Required] Service principal secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal + belongs. + :paramtype tenant_id: str + :keyword thumbprint: Required. [Required] Thumbprint of the certificate used for + authentication. + :paramtype thumbprint: str + """ + super(CertificateDatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = 'Certificate' # type: str + self.authority_url = authority_url + self.client_id = client_id + self.resource_url = resource_url + self.secrets = secrets + self.tenant_id = tenant_id + self.thumbprint = thumbprint + + +class CertificateDatastoreSecrets(DatastoreSecrets): + """Datastore certificate secrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar certificate: Service principal certificate. + :vartype certificate: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'certificate': {'key': 'certificate', 'type': 'str'}, + } + + def __init__( + self, + *, + certificate: Optional[str] = None, + **kwargs + ): + """ + :keyword certificate: Service principal certificate. + :paramtype certificate: str + """ + super(CertificateDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'Certificate' # type: str + self.certificate = certificate + + +class TableVertical(msrest.serialization.Model): + """Abstract class for AutoML tasks that use table dataset as input - such as Classification/Regression/Forecasting. + + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. 
+ :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + """ + + _attribute_map = { + 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, + 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, + 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, + 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + } + + def __init__( + self, + *, + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + **kwargs + ): + """ + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword test_data: Test data input. 
+ :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + """ + super(TableVertical, self).__init__(**kwargs) + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + + +class Classification(AutoMLVertical, TableVertical): + """Classification task in AutoML Table vertical. + + All required parameters must be populated in order to send to Azure. + + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. 
+ :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar positive_label: Positive label for binary metrics calculation. + :vartype positive_label: str + :ivar primary_metric: Primary metric for the task. Known values are: "AUCWeighted", "Accuracy", + "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. + :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings + """ + + _validation = { + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, + 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, + 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, + 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + 'training_settings': {'key': 'trainingSettings', 'type': 'ClassificationTrainingSettings'}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + positive_label: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + training_settings: Optional["_models.ClassificationTrainingSettings"] = None, + **kwargs + ): + """ + :keyword cv_split_column_names: Columns to use for CVSplit data. 
+ :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword positive_label: Positive label for binary metrics calculation. + :paramtype positive_label: str + :keyword primary_metric: Primary metric for the task. Known values are: "AUCWeighted", + "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. 
+ :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings + """ + super(Classification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, limit_settings=limit_settings, n_cross_validations=n_cross_validations, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + self.task_type = 'Classification' # type: str + self.positive_label = positive_label + self.primary_metric = primary_metric + self.training_settings = training_settings + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class TrainingSettings(msrest.serialization.Model): + """Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. 
+ :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + """ + + _attribute_map = { + 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, + 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, + 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, + 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, + 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, + 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, + 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, + } + + def __init__( + self, + *, + enable_dnn_training: Optional[bool] = False, + enable_model_explainability: Optional[bool] = True, + enable_onnx_compatible_models: Optional[bool] = False, + enable_stack_ensemble: Optional[bool] = True, + enable_vote_ensemble: Optional[bool] = True, + ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + """ + super(TrainingSettings, self).__init__(**kwargs) + self.enable_dnn_training = enable_dnn_training + self.enable_model_explainability = enable_model_explainability + self.enable_onnx_compatible_models = enable_onnx_compatible_models + self.enable_stack_ensemble = enable_stack_ensemble + self.enable_vote_ensemble = enable_vote_ensemble + self.ensemble_model_download_timeout = ensemble_model_download_timeout + self.stack_ensemble_settings = stack_ensemble_settings + + +class ClassificationTrainingSettings(TrainingSettings): + """Classification Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. 
+ :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for classification task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + :ivar blocked_training_algorithms: Blocked models for classification task. + :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + """ + + _attribute_map = { + 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, + 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, + 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, + 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, + 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, + 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, + 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, + 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, + 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + } + + def __init__( + self, + *, + enable_dnn_training: Optional[bool] = False, + enable_model_explainability: Optional[bool] = True, + enable_onnx_compatible_models: Optional[bool] = False, + enable_stack_ensemble: Optional[bool] = True, + enable_vote_ensemble: Optional[bool] = True, + ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for classification task. 
+ :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + :keyword blocked_training_algorithms: Blocked models for classification task. + :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ClassificationModels] + """ + super(ClassificationTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, **kwargs) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms + + +class ClusterUpdateParameters(msrest.serialization.Model): + """AmlCompute update parameters. + + :ivar properties: Properties of ClusterUpdate. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation + """ + + _attribute_map = { + 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'}, + } + + def __init__( + self, + *, + properties: Optional["_models.ScaleSettingsInformation"] = None, + **kwargs + ): + """ + :keyword properties: Properties of ClusterUpdate. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation + """ + super(ClusterUpdateParameters, self).__init__(**kwargs) + self.properties = properties + + +class ExportSummary(msrest.serialization.Model): + """ExportSummary. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar format: Required. [Required] The format of exported labels, also as the + discriminator.Constant filled by server. Known values are: "Dataset", "Coco", "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. 
+ :vartype start_date_time: ~datetime.datetime + """ + + _validation = { + 'end_date_time': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'format': {'required': True}, + 'labeling_job_id': {'readonly': True}, + 'start_date_time': {'readonly': True}, + } + + _attribute_map = { + 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + } + + _subtype_map = { + 'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ExportSummary, self).__init__(**kwargs) + self.end_date_time = None + self.exported_row_count = None + self.format = None # type: Optional[str] + self.labeling_job_id = None + self.start_date_time = None + + +class CocoExportSummary(ExportSummary): + """CocoExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar format: Required. [Required] The format of exported labels, also as the + discriminator.Constant filled by server. Known values are: "Dataset", "Coco", "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. + :vartype snapshot_path: str + """ + + _validation = { + 'end_date_time': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'format': {'required': True}, + 'labeling_job_id': {'readonly': True}, + 'start_date_time': {'readonly': True}, + 'container_name': {'readonly': True}, + 'snapshot_path': {'readonly': True}, + } + + _attribute_map = { + 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(CocoExportSummary, self).__init__(**kwargs) + self.format = 'Coco' # type: str + self.container_name = None + self.snapshot_path = None + + +class CodeConfiguration(msrest.serialization.Model): + """Configuration for a scoring code asset. + + All required parameters must be populated in order to send to Azure. + + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". 
+ :vartype scoring_script: str + """ + + _validation = { + 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'scoring_script': {'key': 'scoringScript', 'type': 'str'}, + } + + def __init__( + self, + *, + scoring_script: str, + code_id: Optional[str] = None, + **kwargs + ): + """ + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". + :paramtype scoring_script: str + """ + super(CodeConfiguration, self).__init__(**kwargs) + self.code_id = code_id + self.scoring_script = scoring_script + + +class CodeContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'CodeContainerProperties'}, + } + + def __init__( + self, + *, + properties: "_models.CodeContainerProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties + """ + super(CodeContainer, self).__init__(**kwargs) + self.properties = properties + + +class CodeContainerProperties(AssetContainer): + """Container for code asset versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. 
+ :vartype next_version: str + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super(CodeContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + + +class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of CodeContainer entities. + + :ivar next_link: The link to the next page of CodeContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type CodeContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[CodeContainer]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.CodeContainer"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of CodeContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type CodeContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] + """ + super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class CodeVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'CodeVersionProperties'}, + } + + def __init__( + self, + *, + properties: "_models.CodeVersionProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties + """ + super(CodeVersion, self).__init__(**kwargs) + self.properties = properties + + +class CodeVersionProperties(AssetBase): + """Code asset version details. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar code_uri: Uri where code is located. + :vartype code_uri: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'code_uri': {'key': 'codeUri', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + code_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword code_uri: Uri where code is located. + :paramtype code_uri: str + """ + super(CodeVersionProperties, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + self.code_uri = code_uri + + +class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of CodeVersion entities. + + :ivar next_link: The link to the next page of CodeVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type CodeVersion. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[CodeVersion]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.CodeVersion"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of CodeVersion objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type CodeVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] + """ + super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ColumnTransformer(msrest.serialization.Model): + """Column transformer parameters. + + :ivar fields: Fields to apply transformer logic on. + :vartype fields: list[str] + :ivar parameters: Different properties to be passed to transformer. + Input expected is dictionary of key,value pairs in JSON format. + :vartype parameters: any + """ + + _attribute_map = { + 'fields': {'key': 'fields', 'type': '[str]'}, + 'parameters': {'key': 'parameters', 'type': 'object'}, + } + + def __init__( + self, + *, + fields: Optional[List[str]] = None, + parameters: Optional[Any] = None, + **kwargs + ): + """ + :keyword fields: Fields to apply transformer logic on. + :paramtype fields: list[str] + :keyword parameters: Different properties to be passed to transformer. + Input expected is dictionary of key,value pairs in JSON format. + :paramtype parameters: any + """ + super(ColumnTransformer, self).__init__(**kwargs) + self.fields = fields + self.parameters = parameters + + +class CommandJob(JobBaseProperties): + """Command job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. 
Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python + train.py". + :vartype command: str + :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, + Tensorflow, PyTorch, or null. + :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification + for the job. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar limits: Command Job limit. + :vartype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar parameters: Input parameters. + :vartype parameters: any + :ivar resources: Compute Resource configuration for the job. + :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + 'environment_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'parameters': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, + 'limits': {'key': 'limits', 'type': 'CommandJobLimits'}, + 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + 'parameters': {'key': 'parameters', 'type': 'object'}, + 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, + } + + def __init__( + self, + *, + command: str, + environment_id: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", 
+ identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + code_id: Optional[str] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, + environment_variables: Optional[Dict[str, str]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.CommandJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword command: Required. [Required] The command to execute on startup of the job. eg. + "python train.py". + :paramtype command: str + :keyword distribution: Distribution configuration of the job. If set, this should be one of + Mpi, Tensorflow, PyTorch, or null. + :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :keyword environment_id: Required. [Required] The ARM resource ID of the Environment + specification for the job. + :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword limits: Command Job limit. + :paramtype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword resources: Compute Resource configuration for the job. 
+ :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + super(CommandJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'Command' # type: str + self.code_id = code_id + self.command = command + self.distribution = distribution + self.environment_id = environment_id + self.environment_variables = environment_variables + self.inputs = inputs + self.limits = limits + self.outputs = outputs + self.parameters = None + self.resources = resources + + +class JobLimits(msrest.serialization.Model): + """JobLimits. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CommandJobLimits, SweepJobLimits. + + All required parameters must be populated in order to send to Azure. + + :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Known + values are: "Command", "Sweep". + :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + """ + + _validation = { + 'job_limits_type': {'required': True}, + } + + _attribute_map = { + 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + _subtype_map = { + 'job_limits_type': {'Command': 'CommandJobLimits', 'Sweep': 'SweepJobLimits'} + } + + def __init__( + self, + *, + timeout: Optional[datetime.timedelta] = None, + **kwargs + ): + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + """ + super(JobLimits, self).__init__(**kwargs) + self.job_limits_type = None # type: Optional[str] + self.timeout = timeout + + +class CommandJobLimits(JobLimits): + """Command Job limit class. + + All required parameters must be populated in order to send to Azure. + + :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Known + values are: "Command", "Sweep". + :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + """ + + _validation = { + 'job_limits_type': {'required': True}, + } + + _attribute_map = { + 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + timeout: Optional[datetime.timedelta] = None, + **kwargs + ): + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + """ + super(CommandJobLimits, self).__init__(timeout=timeout, **kwargs) + self.job_limits_type = 'Command' # type: str + + +class ComponentContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. 
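+
+    A minimal construction sketch showing how this envelope wraps a
+    ComponentContainerProperties payload (the description and tag values are
+    illustrative; only ``properties`` is required)::
+
+        container = ComponentContainer(
+            properties=ComponentContainerProperties(
+                description="Components shared by the training pipelines",
+                tags={"team": "ml-platform"},
+            )
+        )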
+ + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'ComponentContainerProperties'}, + } + + def __init__( + self, + *, + properties: "_models.ComponentContainerProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties + """ + super(ComponentContainer, self).__init__(**kwargs) + self.properties = properties + + +class ComponentContainerProperties(AssetContainer): + """Component container definition. + + +.. raw:: html + + . + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. 
+ :paramtype is_archived: bool + """ + super(ComponentContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + + +class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of ComponentContainer entities. + + :ivar next_link: The link to the next page of ComponentContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ComponentContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[ComponentContainer]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.ComponentContainer"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of ComponentContainer objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ComponentContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] + """ + super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ComponentVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'ComponentVersionProperties'}, + } + + def __init__( + self, + *, + properties: "_models.ComponentVersionProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties + """ + super(ComponentVersion, self).__init__(**kwargs) + self.properties = properties + + +class ComponentVersionProperties(AssetBase): + """Definition of a component version: defines resources that span component types. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. 
+ :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar component_spec: Defines Component definition details. + + + .. raw:: html + + . + :vartype component_spec: any + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'component_spec': {'key': 'componentSpec', 'type': 'object'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + component_spec: Optional[Any] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword component_spec: Defines Component definition details. + + + .. raw:: html + + . + :paramtype component_spec: any + """ + super(ComponentVersionProperties, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + self.component_spec = component_spec + + +class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of ComponentVersion entities. + + :ivar next_link: The link to the next page of ComponentVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ComponentVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[ComponentVersion]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.ComponentVersion"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of ComponentVersion objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ComponentVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] + """ + super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ComputeInstanceSchema(msrest.serialization.Model): + """Properties(top level) of ComputeInstance. + + :ivar properties: Properties of ComputeInstance. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.ComputeInstanceProperties"] = None, + **kwargs + ): + """ + :keyword properties: Properties of ComputeInstance. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + super(ComputeInstanceSchema, self).__init__(**kwargs) + self.properties = properties + + +class ComputeInstance(Compute, ComputeInstanceSchema): + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of ComputeInstance. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + properties: Optional["_models.ComputeInstanceProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword properties: Properties of ComputeInstance. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(ComputeInstance, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + self.properties = properties + self.compute_type = 'ComputeInstance' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class ComputeInstanceApplication(msrest.serialization.Model): + """Defines an Aml Instance application and its connectivity endpoint URI. + + :ivar display_name: Name of the ComputeInstance application. + :vartype display_name: str + :ivar endpoint_uri: Application' endpoint URI. + :vartype endpoint_uri: str + """ + + _attribute_map = { + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + } + + def __init__( + self, + *, + display_name: Optional[str] = None, + endpoint_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword display_name: Name of the ComputeInstance application. + :paramtype display_name: str + :keyword endpoint_uri: Application' endpoint URI. + :paramtype endpoint_uri: str + """ + super(ComputeInstanceApplication, self).__init__(**kwargs) + self.display_name = display_name + self.endpoint_uri = endpoint_uri + + +class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): + """Defines all connectivity endpoints and properties for an ComputeInstance. 
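Because ComputeInstance multiply inherits from ComputeInstanceSchema and Compute, its writable surface is small (`properties`, `description`, `resource_id`, `disable_local_auth`); everything else is populated by the server. A hedged sketch of how the pieces compose, assuming the usual models namespace; ComputeInstanceProperties, ComputeInstanceSshSettings, and ComputeResource are defined further down in this module, and the VM size and location values are purely illustrative:

```python
from azure.mgmt.machinelearningservices import models

# Only writable fields are set; compute_location, provisioning_state,
# created_on, etc. stay None until the service fills them in.
instance = models.ComputeInstance(
    description="Dev box for experimentation",
    disable_local_auth=True,
    properties=models.ComputeInstanceProperties(
        vm_size="STANDARD_DS3_V2",  # illustrative SKU name
        ssh_settings=models.ComputeInstanceSshSettings(ssh_public_access="Disabled"),
    ),
)
assert instance.compute_type == "ComputeInstance"

# For create/update calls the compute definition is wrapped in the ARM
# envelope (ComputeResource) defined later in this module.
resource = models.ComputeResource(location="eastus", properties=instance)
```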
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public_ip_address: Public IP Address of this ComputeInstance. + :vartype public_ip_address: str + :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in + which the compute instance is deployed). + :vartype private_ip_address: str + """ + + _validation = { + 'public_ip_address': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + } + + _attribute_map = { + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) + self.public_ip_address = None + self.private_ip_address = None + + +class ComputeInstanceContainer(msrest.serialization.Model): + """Defines an Aml Instance container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the ComputeInstance container. + :vartype name: str + :ivar autosave: Auto save settings. Known values are: "None", "Local", "Remote". + :vartype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave + :ivar gpu: Information of GPU. + :vartype gpu: str + :ivar network: network of this container. Known values are: "Bridge", "Host". + :vartype network: str or ~azure.mgmt.machinelearningservices.models.Network + :ivar environment: Environment information of this container. + :vartype environment: ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo + :ivar services: services of this containers. + :vartype services: list[any] + """ + + _validation = { + 'services': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'autosave': {'key': 'autosave', 'type': 'str'}, + 'gpu': {'key': 'gpu', 'type': 'str'}, + 'network': {'key': 'network', 'type': 'str'}, + 'environment': {'key': 'environment', 'type': 'ComputeInstanceEnvironmentInfo'}, + 'services': {'key': 'services', 'type': '[object]'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + autosave: Optional[Union[str, "_models.Autosave"]] = None, + gpu: Optional[str] = None, + network: Optional[Union[str, "_models.Network"]] = None, + environment: Optional["_models.ComputeInstanceEnvironmentInfo"] = None, + **kwargs + ): + """ + :keyword name: Name of the ComputeInstance container. + :paramtype name: str + :keyword autosave: Auto save settings. Known values are: "None", "Local", "Remote". + :paramtype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave + :keyword gpu: Information of GPU. + :paramtype gpu: str + :keyword network: network of this container. Known values are: "Bridge", "Host". + :paramtype network: str or ~azure.mgmt.machinelearningservices.models.Network + :keyword environment: Environment information of this container. + :paramtype environment: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo + """ + super(ComputeInstanceContainer, self).__init__(**kwargs) + self.name = name + self.autosave = autosave + self.gpu = gpu + self.network = network + self.environment = environment + self.services = None + + +class ComputeInstanceCreatedBy(msrest.serialization.Model): + """Describes information on user who created this ComputeInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_name: Name of the user. 
+ :vartype user_name: str + :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. + :vartype user_org_id: str + :ivar user_id: Uniquely identifies the user within his/her organization. + :vartype user_id: str + """ + + _validation = { + 'user_name': {'readonly': True}, + 'user_org_id': {'readonly': True}, + 'user_id': {'readonly': True}, + } + + _attribute_map = { + 'user_name': {'key': 'userName', 'type': 'str'}, + 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ComputeInstanceCreatedBy, self).__init__(**kwargs) + self.user_name = None + self.user_org_id = None + self.user_id = None + + +class ComputeInstanceDataDisk(msrest.serialization.Model): + """Defines an Aml Instance DataDisk. + + :ivar caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", "ReadWrite". + :vartype caching: str or ~azure.mgmt.machinelearningservices.models.Caching + :ivar disk_size_gb: The initial disk size in gigabytes. + :vartype disk_size_gb: int + :ivar lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, + each should have a distinct lun. + :vartype lun: int + :ivar storage_account_type: type of this storage account. Known values are: "Standard_LRS", + "Premium_LRS". Default value: "Standard_LRS". + :vartype storage_account_type: str or + ~azure.mgmt.machinelearningservices.models.StorageAccountType + """ + + _attribute_map = { + 'caching': {'key': 'caching', 'type': 'str'}, + 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, + 'lun': {'key': 'lun', 'type': 'int'}, + 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, + } + + def __init__( + self, + *, + caching: Optional[Union[str, "_models.Caching"]] = None, + disk_size_gb: Optional[int] = None, + lun: Optional[int] = None, + storage_account_type: Optional[Union[str, "_models.StorageAccountType"]] = "Standard_LRS", + **kwargs + ): + """ + :keyword caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", "ReadWrite". + :paramtype caching: str or ~azure.mgmt.machinelearningservices.models.Caching + :keyword disk_size_gb: The initial disk size in gigabytes. + :paramtype disk_size_gb: int + :keyword lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, + each should have a distinct lun. + :paramtype lun: int + :keyword storage_account_type: type of this storage account. Known values are: "Standard_LRS", + "Premium_LRS". Default value: "Standard_LRS". + :paramtype storage_account_type: str or + ~azure.mgmt.machinelearningservices.models.StorageAccountType + """ + super(ComputeInstanceDataDisk, self).__init__(**kwargs) + self.caching = caching + self.disk_size_gb = disk_size_gb + self.lun = lun + self.storage_account_type = storage_account_type + + +class ComputeInstanceDataMount(msrest.serialization.Model): + """Defines an Aml Instance DataMount. + + :ivar source: Source of the ComputeInstance data mount. + :vartype source: str + :ivar source_type: Data source type. Known values are: "Dataset", "Datastore", "URI". + :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType + :ivar mount_name: name of the ComputeInstance data mount. + :vartype mount_name: str + :ivar mount_action: Mount Action. Known values are: "Mount", "Unmount". + :vartype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction + :ivar created_by: who this data mount created by. 
+ :vartype created_by: str + :ivar mount_path: Path of this data mount. + :vartype mount_path: str + :ivar mount_state: Mount state. Known values are: "MountRequested", "Mounted", "MountFailed", + "UnmountRequested", "UnmountFailed", "Unmounted". + :vartype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState + :ivar mounted_on: The time when the disk mounted. + :vartype mounted_on: ~datetime.datetime + :ivar error: Error of this data mount. + :vartype error: str + """ + + _attribute_map = { + 'source': {'key': 'source', 'type': 'str'}, + 'source_type': {'key': 'sourceType', 'type': 'str'}, + 'mount_name': {'key': 'mountName', 'type': 'str'}, + 'mount_action': {'key': 'mountAction', 'type': 'str'}, + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'mount_path': {'key': 'mountPath', 'type': 'str'}, + 'mount_state': {'key': 'mountState', 'type': 'str'}, + 'mounted_on': {'key': 'mountedOn', 'type': 'iso-8601'}, + 'error': {'key': 'error', 'type': 'str'}, + } + + def __init__( + self, + *, + source: Optional[str] = None, + source_type: Optional[Union[str, "_models.SourceType"]] = None, + mount_name: Optional[str] = None, + mount_action: Optional[Union[str, "_models.MountAction"]] = None, + created_by: Optional[str] = None, + mount_path: Optional[str] = None, + mount_state: Optional[Union[str, "_models.MountState"]] = None, + mounted_on: Optional[datetime.datetime] = None, + error: Optional[str] = None, + **kwargs + ): + """ + :keyword source: Source of the ComputeInstance data mount. + :paramtype source: str + :keyword source_type: Data source type. Known values are: "Dataset", "Datastore", "URI". + :paramtype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType + :keyword mount_name: name of the ComputeInstance data mount. + :paramtype mount_name: str + :keyword mount_action: Mount Action. Known values are: "Mount", "Unmount". + :paramtype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction + :keyword created_by: who this data mount created by. + :paramtype created_by: str + :keyword mount_path: Path of this data mount. + :paramtype mount_path: str + :keyword mount_state: Mount state. Known values are: "MountRequested", "Mounted", + "MountFailed", "UnmountRequested", "UnmountFailed", "Unmounted". + :paramtype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState + :keyword mounted_on: The time when the disk mounted. + :paramtype mounted_on: ~datetime.datetime + :keyword error: Error of this data mount. + :paramtype error: str + """ + super(ComputeInstanceDataMount, self).__init__(**kwargs) + self.source = source + self.source_type = source_type + self.mount_name = mount_name + self.mount_action = mount_action + self.created_by = created_by + self.mount_path = mount_path + self.mount_state = mount_state + self.mounted_on = mounted_on + self.error = error + + +class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): + """Environment information. + + :ivar name: name of environment. + :vartype name: str + :ivar version: version of environment. + :vartype version: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + version: Optional[str] = None, + **kwargs + ): + """ + :keyword name: name of environment. + :paramtype name: str + :keyword version: version of environment. 
+ :paramtype version: str + """ + super(ComputeInstanceEnvironmentInfo, self).__init__(**kwargs) + self.name = name + self.version = version + + +class ComputeInstanceLastOperation(msrest.serialization.Model): + """The last operation on ComputeInstance. + + :ivar operation_name: Name of the last operation. Known values are: "Create", "Start", "Stop", + "Restart", "Reimage", "Delete". + :vartype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName + :ivar operation_time: Time of the last operation. + :vartype operation_time: ~datetime.datetime + :ivar operation_status: Operation status. Known values are: "InProgress", "Succeeded", + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed". + :vartype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus + :ivar operation_trigger: Trigger of operation. Known values are: "User", "Schedule", + "IdleShutdown". + :vartype operation_trigger: str or ~azure.mgmt.machinelearningservices.models.OperationTrigger + """ + + _attribute_map = { + 'operation_name': {'key': 'operationName', 'type': 'str'}, + 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, + 'operation_status': {'key': 'operationStatus', 'type': 'str'}, + 'operation_trigger': {'key': 'operationTrigger', 'type': 'str'}, + } + + def __init__( + self, + *, + operation_name: Optional[Union[str, "_models.OperationName"]] = None, + operation_time: Optional[datetime.datetime] = None, + operation_status: Optional[Union[str, "_models.OperationStatus"]] = None, + operation_trigger: Optional[Union[str, "_models.OperationTrigger"]] = None, + **kwargs + ): + """ + :keyword operation_name: Name of the last operation. Known values are: "Create", "Start", + "Stop", "Restart", "Reimage", "Delete". + :paramtype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName + :keyword operation_time: Time of the last operation. + :paramtype operation_time: ~datetime.datetime + :keyword operation_status: Operation status. Known values are: "InProgress", "Succeeded", + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed". + :paramtype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus + :keyword operation_trigger: Trigger of operation. Known values are: "User", "Schedule", + "IdleShutdown". + :paramtype operation_trigger: str or + ~azure.mgmt.machinelearningservices.models.OperationTrigger + """ + super(ComputeInstanceLastOperation, self).__init__(**kwargs) + self.operation_name = operation_name + self.operation_time = operation_time + self.operation_status = operation_status + self.operation_trigger = operation_trigger + + +class ComputeInstanceProperties(msrest.serialization.Model): + """Compute Instance properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar vm_size: Virtual Machine Size. + :vartype vm_size: str + :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. + :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :ivar application_sharing_policy: Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access applications on this + compute instance. When Shared, any workspace user can access applications on this instance + depending on his/her assigned role. Known values are: "Personal", "Shared". Default value: + "Shared". 
+ :vartype application_sharing_policy: str or
+ ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy
+ :ivar ssh_settings: Specifies policy and settings for SSH access.
+ :vartype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings
+ :ivar custom_services: List of Custom Services added to the compute.
+ :vartype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService]
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on the user who created this ComputeInstance.
+ :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse]
+ :ivar state: The current state of this ComputeInstance. Known values are: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState
+ :ivar compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Known values are: "personal". Default value: "personal".
+ :vartype compute_instance_authorization_type: str or
+ ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType
+ :ivar personal_compute_instance_settings: Settings for a personal compute instance.
+ :vartype personal_compute_instance_settings:
+ ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings
+ :ivar setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :vartype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation:
+ ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation
+ :ivar schedules: The list of schedules to be applied on the computes.
+ :vartype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules
+ :ivar idle_time_before_shutdown: Stops the compute instance after a user-defined period of
+ inactivity. Time is defined in ISO 8601 format. Minimum is 15 minutes, maximum is 3 days.
+ :vartype idle_time_before_shutdown: str
+ :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+ Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :vartype enable_node_public_ip: bool
+ :ivar containers: Describes information about the containers on this ComputeInstance.
+ :vartype containers: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceContainer]
+ :ivar data_disks: Describes information about the dataDisks on this ComputeInstance.
+ :vartype data_disks: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataDisk]
+ :ivar data_mounts: Describes information about the dataMounts on this ComputeInstance.
+ :vartype data_mounts: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataMount]
+ :ivar versions: ComputeInstance version.
+ :vartype versions: ~azure.mgmt.machinelearningservices.models.ComputeInstanceVersion
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ 'schedules': {'readonly': True},
+ 'containers': {'readonly': True},
+ 'data_disks': {'readonly': True},
+ 'data_mounts': {'readonly': True},
+ 'versions': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'custom_services': {'key': 'customServices', 'type': '[CustomService]'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'},
+ 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ 'containers': {'key': 'containers', 'type': '[ComputeInstanceContainer]'},
+ 'data_disks': {'key': 'dataDisks', 'type': '[ComputeInstanceDataDisk]'},
+ 'data_mounts': {'key': 'dataMounts', 'type': '[ComputeInstanceDataMount]'},
+ 'versions': {'key': 'versions', 'type': 'ComputeInstanceVersion'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vm_size: Optional[str] = None,
+ subnet: Optional["_models.ResourceId"] = None,
+ application_sharing_policy: Optional[Union[str, "_models.ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["_models.ComputeInstanceSshSettings"] = None,
+ custom_services: Optional[List["_models.CustomService"]] = None,
+ compute_instance_authorization_type: Optional[Union[str, "_models.ComputeInstanceAuthorizationType"]] = "personal",
+ personal_compute_instance_settings: Optional["_models.PersonalComputeInstanceSettings"] = None,
+ setup_scripts: Optional["_models.SetupScripts"] = None,
+ idle_time_before_shutdown: Optional[str] = None,
+ enable_node_public_ip: Optional[bool] = None,
+ **kwargs
+ ):
+ """
+ :keyword vm_size: Virtual Machine Size.
+ :paramtype vm_size: str
+ :keyword subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId
+ :keyword application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Known values are: "Personal", "Shared". Default value:
+ "Shared".
+ :paramtype application_sharing_policy: str or
+ ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy
+ :keyword ssh_settings: Specifies policy and settings for SSH access.
+ :paramtype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings
+ :keyword custom_services: List of Custom Services added to the compute.
+ :paramtype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService]
+ :keyword compute_instance_authorization_type: The Compute Instance Authorization type.
+ Available values are personal (default). Known values are: "personal". Default value:
+ "personal".
+ :paramtype compute_instance_authorization_type: str or
+ ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType
+ :keyword personal_compute_instance_settings: Settings for a personal compute instance.
+ :paramtype personal_compute_instance_settings:
+ ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings
+ :keyword setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :paramtype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts
+ :keyword idle_time_before_shutdown: Stops the compute instance after a user-defined period of
+ inactivity. Time is defined in ISO 8601 format. Minimum is 15 minutes, maximum is 3 days.
+ :paramtype idle_time_before_shutdown: str
+ :keyword enable_node_public_ip: Enable or disable node public IP address provisioning.
+ Possible values are: true - Indicates that the compute nodes will have public IPs provisioned.
+ false - Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :paramtype enable_node_public_ip: bool
+ """
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.custom_services = custom_services
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.setup_scripts = setup_scripts
+ self.last_operation = None
+ self.schedules = None
+ self.idle_time_before_shutdown = idle_time_before_shutdown
+ self.enable_node_public_ip = enable_node_public_ip
+ self.containers = None
+ self.data_disks = None
+ self.data_mounts = None
+ self.versions = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
Known + values are: "Enabled", "Disabled". Default value: "Disabled". + :vartype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess + :ivar admin_user_name: Describes the admin user name. + :vartype admin_user_name: str + :ivar ssh_port: Describes the port for connecting through SSH. + :vartype ssh_port: int + :ivar admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t + rsa -b 2048" to generate your SSH key pairs. + :vartype admin_public_key: str + """ + + _validation = { + 'admin_user_name': {'readonly': True}, + 'ssh_port': {'readonly': True}, + } + + _attribute_map = { + 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + } + + def __init__( + self, + *, + ssh_public_access: Optional[Union[str, "_models.SshPublicAccess"]] = "Disabled", + admin_public_key: Optional[str] = None, + **kwargs + ): + """ + :keyword ssh_public_access: State of the public SSH port. Possible values are: Disabled - + Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the + public ssh port is open and accessible according to the VNet/subnet policy if applicable. Known + values are: "Enabled", "Disabled". Default value: "Disabled". + :paramtype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess + :keyword admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen + -t rsa -b 2048" to generate your SSH key pairs. + :paramtype admin_public_key: str + """ + super(ComputeInstanceSshSettings, self).__init__(**kwargs) + self.ssh_public_access = ssh_public_access + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = admin_public_key + + +class ComputeInstanceVersion(msrest.serialization.Model): + """Version of computeInstance. + + :ivar runtime: Runtime of compute instance. + :vartype runtime: str + """ + + _attribute_map = { + 'runtime': {'key': 'runtime', 'type': 'str'}, + } + + def __init__( + self, + *, + runtime: Optional[str] = None, + **kwargs + ): + """ + :keyword runtime: Runtime of compute instance. + :paramtype runtime: str + """ + super(ComputeInstanceVersion, self).__init__(**kwargs) + self.runtime = runtime + + +class ComputeResourceSchema(msrest.serialization.Model): + """ComputeResourceSchema. + + :ivar properties: Compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'Compute'}, + } + + def __init__( + self, + *, + properties: Optional["_models.Compute"] = None, + **kwargs + ): + """ + :keyword properties: Compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute + """ + super(ComputeResourceSchema, self).__init__(**kwargs) + self.properties = properties + + +class ComputeResource(Resource, ComputeResourceSchema): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar properties: Compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'Compute'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + } + + def __init__( + self, + *, + properties: Optional["_models.Compute"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword properties: Compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super(ComputeResource, self).__init__(properties=properties, **kwargs) + self.properties = properties + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.id = None + self.name = None + self.type = None + self.system_data = None + + +class ComputeSchedules(msrest.serialization.Model): + """The list of schedules to be applied on the computes. + + :ivar compute_start_stop: The list of compute start stop schedules to be applied. + :vartype compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + + _attribute_map = { + 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, + } + + def __init__( + self, + *, + compute_start_stop: Optional[List["_models.ComputeStartStopSchedule"]] = None, + **kwargs + ): + """ + :keyword compute_start_stop: The list of compute start stop schedules to be applied. 
+ :paramtype compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + super(ComputeSchedules, self).__init__(**kwargs) + self.compute_start_stop = compute_start_stop + + +class ComputeStartStopSchedule(msrest.serialization.Model): + """Compute start stop schedule properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Schedule id. + :vartype id: str + :ivar provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", "Failed". + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningStatus + :ivar action: The compute power action. Known values are: "Start", "Stop". + :vartype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :ivar schedule: + :vartype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase + """ + + _validation = { + 'id': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, + } + + def __init__( + self, + *, + action: Optional[Union[str, "_models.ComputePowerAction"]] = None, + schedule: Optional["_models.ScheduleBase"] = None, + **kwargs + ): + """ + :keyword action: The compute power action. Known values are: "Start", "Stop". + :paramtype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :keyword schedule: + :paramtype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase + """ + super(ComputeStartStopSchedule, self).__init__(**kwargs) + self.id = None + self.provisioning_status = None + self.action = action + self.schedule = schedule + + +class ContainerResourceRequirements(msrest.serialization.Model): + """Resource requirements for each container instance within an online deployment. + + :ivar container_resource_limits: Container resource limit info:. + :vartype container_resource_limits: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + :ivar container_resource_requests: Container resource request info:. + :vartype container_resource_requests: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + """ + + _attribute_map = { + 'container_resource_limits': {'key': 'containerResourceLimits', 'type': 'ContainerResourceSettings'}, + 'container_resource_requests': {'key': 'containerResourceRequests', 'type': 'ContainerResourceSettings'}, + } + + def __init__( + self, + *, + container_resource_limits: Optional["_models.ContainerResourceSettings"] = None, + container_resource_requests: Optional["_models.ContainerResourceSettings"] = None, + **kwargs + ): + """ + :keyword container_resource_limits: Container resource limit info:. + :paramtype container_resource_limits: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + :keyword container_resource_requests: Container resource request info:. + :paramtype container_resource_requests: + ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings + """ + super(ContainerResourceRequirements, self).__init__(**kwargs) + self.container_resource_limits = container_resource_limits + self.container_resource_requests = container_resource_requests + + +class ContainerResourceSettings(msrest.serialization.Model): + """ContainerResourceSettings. 
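ComputeSchedules is a thin wrapper around a list of ComputeStartStopSchedule entries, whose `id` and `provisioning_status` are server-populated and therefore not constructor arguments. A hedged sketch, assuming the usual models namespace; ScheduleBase (referenced by the `schedule` field) is defined elsewhere in this module, so only the power action is set here:

```python
from azure.mgmt.machinelearningservices import models

# One stop schedule; id and provisioning_status are read-only on the model.
schedules = models.ComputeSchedules(
    compute_start_stop=[models.ComputeStartStopSchedule(action="Stop")]
)
```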
+ + :ivar cpu: Number of vCPUs request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype cpu: str + :ivar gpu: Number of Nvidia GPU cards request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype gpu: str + :ivar memory: Memory size request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :vartype memory: str + """ + + _attribute_map = { + 'cpu': {'key': 'cpu', 'type': 'str'}, + 'gpu': {'key': 'gpu', 'type': 'str'}, + 'memory': {'key': 'memory', 'type': 'str'}, + } + + def __init__( + self, + *, + cpu: Optional[str] = None, + gpu: Optional[str] = None, + memory: Optional[str] = None, + **kwargs + ): + """ + :keyword cpu: Number of vCPUs request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype cpu: str + :keyword gpu: Number of Nvidia GPU cards request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype gpu: str + :keyword memory: Memory size request/limit for container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. + :paramtype memory: str + """ + super(ContainerResourceSettings, self).__init__(**kwargs) + self.cpu = cpu + self.gpu = gpu + self.memory = memory + + +class CosmosDbSettings(msrest.serialization.Model): + """CosmosDbSettings. + + :ivar collections_throughput: The throughput of the collections in cosmosdb database. + :vartype collections_throughput: int + """ + + _attribute_map = { + 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, + } + + def __init__( + self, + *, + collections_throughput: Optional[int] = None, + **kwargs + ): + """ + :keyword collections_throughput: The throughput of the collections in cosmosdb database. + :paramtype collections_throughput: int + """ + super(CosmosDbSettings, self).__init__(**kwargs) + self.collections_throughput = collections_throughput + + +class TriggerBase(msrest.serialization.Model): + """TriggerBase. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CronTrigger, RecurrenceTrigger. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :vartype end_time: str + :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar trigger_type: Required. [Required].Constant filled by server. Known values are: + "Recurrence", "Cron". 
+ :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + """ + + _validation = { + 'trigger_type': {'required': True}, + } + + _attribute_map = { + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + } + + _subtype_map = { + 'trigger_type': {'Cron': 'CronTrigger', 'Recurrence': 'RecurrenceTrigger'} + } + + def __init__( + self, + *, + end_time: Optional[str] = None, + start_time: Optional[str] = None, + time_zone: Optional[str] = "UTC", + **kwargs + ): + """ + :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :paramtype end_time: str + :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + """ + super(TriggerBase, self).__init__(**kwargs) + self.end_time = end_time + self.start_time = start_time + self.time_zone = time_zone + self.trigger_type = None # type: Optional[str] + + +class CronTrigger(TriggerBase): + """CronTrigger. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. + :vartype end_time: str + :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :vartype start_time: str + :ivar time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :vartype time_zone: str + :ivar trigger_type: Required. [Required].Constant filled by server. Known values are: + "Recurrence", "Cron". + :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType + :ivar expression: Required. [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. + :vartype expression: str + """ + + _validation = { + 'trigger_type': {'required': True}, + 'expression': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__( + self, + *, + expression: str, + end_time: Optional[str] = None, + start_time: Optional[str] = None, + time_zone: Optional[str] = "UTC", + **kwargs + ): + """ + :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer + https://en.wikipedia.org/wiki/ISO_8601. + Recommented format would be "2022-06-01T00:00:01" + If not present, the schedule will run indefinitely. 
+ :paramtype end_time: str + :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC + offset. + :paramtype start_time: str + :keyword time_zone: Specifies time zone in which the schedule runs. + TimeZone should follow Windows time zone format. Refer: + https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. + :paramtype time_zone: str + :keyword expression: Required. [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. + :paramtype expression: str + """ + super(CronTrigger, self).__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) + self.trigger_type = 'Cron' # type: str + self.expression = expression + + +class CsvExportSummary(ExportSummary): + """CsvExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar format: Required. [Required] The format of exported labels, also as the + discriminator.Constant filled by server. Known values are: "Dataset", "Coco", "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar container_name: The container name to which the labels will be exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. + :vartype snapshot_path: str + """ + + _validation = { + 'end_date_time': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'format': {'required': True}, + 'labeling_job_id': {'readonly': True}, + 'start_date_time': {'readonly': True}, + 'container_name': {'readonly': True}, + 'snapshot_path': {'readonly': True}, + } + + _attribute_map = { + 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(CsvExportSummary, self).__init__(**kwargs) + self.format = 'CSV' # type: str + self.container_name = None + self.snapshot_path = None + + +class CustomForecastHorizon(ForecastHorizon): + """The desired maximum forecast horizon in units of time-series frequency. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by + server. Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode + :ivar value: Required. [Required] Forecast horizon value. 
+ :vartype value: int + """ + + _validation = { + 'mode': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'int'}, + } + + def __init__( + self, + *, + value: int, + **kwargs + ): + """ + :keyword value: Required. [Required] Forecast horizon value. + :paramtype value: int + """ + super(CustomForecastHorizon, self).__init__(**kwargs) + self.mode = 'Custom' # type: str + self.value = value + + +class JobInput(msrest.serialization.Model): + """Command job definition. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, TritonModelJobInput, UriFileJobInput, UriFolderJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + 'job_input_type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + } + + _subtype_map = { + 'job_input_type': {'custom_model': 'CustomModelJobInput', 'literal': 'LiteralJobInput', 'mlflow_model': 'MLFlowModelJobInput', 'mltable': 'MLTableJobInput', 'triton_model': 'TritonModelJobInput', 'uri_file': 'UriFileJobInput', 'uri_folder': 'UriFolderJobInput'} + } + + def __init__( + self, + *, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the input. + :paramtype description: str + """ + super(JobInput, self).__init__(**kwargs) + self.description = description + self.job_input_type = None # type: Optional[str] + + +class CustomModelJobInput(JobInput, AssetJobInput): + """CustomModelJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + } + + def __init__( + self, + *, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Input Asset Delivery Mode. 
Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(CustomModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'custom_model' # type: str + self.description = description + + +class JobOutput(msrest.serialization.Model): + """Job output definition container information on where to find job output/logs. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CustomModelJobOutput, MLFlowModelJobOutput, MLTableJobOutput, TritonModelJobOutput, UriFileJobOutput, UriFolderJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + """ + + _validation = { + 'job_output_type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + } + + _subtype_map = { + 'job_output_type': {'custom_model': 'CustomModelJobOutput', 'mlflow_model': 'MLFlowModelJobOutput', 'mltable': 'MLTableJobOutput', 'triton_model': 'TritonModelJobOutput', 'uri_file': 'UriFileJobOutput', 'uri_folder': 'UriFolderJobOutput'} + } + + def __init__( + self, + *, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the output. + :paramtype description: str + """ + super(JobOutput, self).__init__(**kwargs) + self.description = description + self.job_output_type = None # type: Optional[str] + + +class CustomModelJobOutput(JobOutput, AssetJobOutput): + """CustomModelJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + """ + + _validation = { + 'job_output_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". 
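As a quick sketch of the discriminated JobInput/JobOutput hierarchy above (illustrative only; the asset URIs are placeholders and the models are assumed to come from azure.mgmt.machinelearningservices.models), note that the job_input_type/job_output_type discriminators are filled in by the subclasses:

from azure.mgmt.machinelearningservices import models

inputs = {
    "raw_model": models.CustomModelJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/models/v1",  # placeholder URI
        mode="ReadOnlyMount",
        description="Custom model consumed by the job",
    ),
}
outputs = {
    "packaged_model": models.CustomModelJobOutput(
        uri="azureml://datastores/workspaceblobstore/paths/outputs/",   # placeholder URI
        mode="ReadWriteMount",
    ),
}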
+ :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(CustomModelJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'custom_model' # type: str + self.description = description + + +class CustomNCrossValidations(NCrossValidations): + """N-Cross validations are specified by user. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by + server. Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode + :ivar value: Required. [Required] N-Cross validations value. + :vartype value: int + """ + + _validation = { + 'mode': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'int'}, + } + + def __init__( + self, + *, + value: int, + **kwargs + ): + """ + :keyword value: Required. [Required] N-Cross validations value. + :paramtype value: int + """ + super(CustomNCrossValidations, self).__init__(**kwargs) + self.mode = 'Custom' # type: str + self.value = value + + +class CustomSeasonality(Seasonality): + """CustomSeasonality. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Known values are: + "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode + :ivar value: Required. [Required] Seasonality value. + :vartype value: int + """ + + _validation = { + 'mode': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'int'}, + } + + def __init__( + self, + *, + value: int, + **kwargs + ): + """ + :keyword value: Required. [Required] Seasonality value. + :paramtype value: int + """ + super(CustomSeasonality, self).__init__(**kwargs) + self.mode = 'Custom' # type: str + self.value = value + + +class CustomService(msrest.serialization.Model): + """Specifies the custom service configuration. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar name: Name of the Custom Service. + :vartype name: str + :ivar image: Describes the Image Specifications. + :vartype image: ~azure.mgmt.machinelearningservices.models.Image + :ivar environment_variables: Environment Variable for the container. + :vartype environment_variables: dict[str, + ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] + :ivar docker: Describes the docker settings for the image. + :vartype docker: ~azure.mgmt.machinelearningservices.models.Docker + :ivar endpoints: Configuring the endpoints for the container. + :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] + :ivar volumes: Configuring the volumes for the container. 
+ :vartype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'image': {'key': 'image', 'type': 'Image'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{EnvironmentVariable}'}, + 'docker': {'key': 'docker', 'type': 'Docker'}, + 'endpoints': {'key': 'endpoints', 'type': '[Endpoint]'}, + 'volumes': {'key': 'volumes', 'type': '[VolumeDefinition]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + name: Optional[str] = None, + image: Optional["_models.Image"] = None, + environment_variables: Optional[Dict[str, "_models.EnvironmentVariable"]] = None, + docker: Optional["_models.Docker"] = None, + endpoints: Optional[List["_models.Endpoint"]] = None, + volumes: Optional[List["_models.VolumeDefinition"]] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword name: Name of the Custom Service. + :paramtype name: str + :keyword image: Describes the Image Specifications. + :paramtype image: ~azure.mgmt.machinelearningservices.models.Image + :keyword environment_variables: Environment Variable for the container. + :paramtype environment_variables: dict[str, + ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] + :keyword docker: Describes the docker settings for the image. + :paramtype docker: ~azure.mgmt.machinelearningservices.models.Docker + :keyword endpoints: Configuring the endpoints for the container. + :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] + :keyword volumes: Configuring the volumes for the container. + :paramtype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] + """ + super(CustomService, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.image = image + self.environment_variables = environment_variables + self.docker = docker + self.endpoints = endpoints + self.volumes = volumes + + +class CustomTargetLags(TargetLags): + """CustomTargetLags. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. + Known values are: "Auto", "Custom". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode + :ivar values: Required. [Required] Set target lags values. + :vartype values: list[int] + """ + + _validation = { + 'mode': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[int]'}, + } + + def __init__( + self, + *, + values: List[int], + **kwargs + ): + """ + :keyword values: Required. [Required] Set target lags values. + :paramtype values: list[int] + """ + super(CustomTargetLags, self).__init__(**kwargs) + self.mode = 'Custom' # type: str + self.values = values + + +class CustomTargetRollingWindowSize(TargetRollingWindowSize): + """CustomTargetRollingWindowSize. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by + server. Known values are: "Auto", "Custom". 
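The Custom* wrappers above all follow the same pattern: the mode discriminator is fixed to "Custom" by the subclass ("Auto" being the other known mode) and the caller supplies an explicit value. A brief, illustrative sketch, assuming the models are imported from azure.mgmt.machinelearningservices.models:

from azure.mgmt.machinelearningservices import models

horizon = models.CustomForecastHorizon(value=14)        # 14 periods of the series frequency
n_cross_validations = models.CustomNCrossValidations(value=5)
seasonality = models.CustomSeasonality(value=7)
target_lags = models.CustomTargetLags(values=[1, 2, 3])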
+ :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode + :ivar value: Required. [Required] TargetRollingWindowSize value. + :vartype value: int + """ + + _validation = { + 'mode': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'int'}, + } + + def __init__( + self, + *, + value: int, + **kwargs + ): + """ + :keyword value: Required. [Required] TargetRollingWindowSize value. + :paramtype value: int + """ + super(CustomTargetRollingWindowSize, self).__init__(**kwargs) + self.mode = 'Custom' # type: str + self.value = value + + +class DatabricksSchema(msrest.serialization.Model): + """DatabricksSchema. + + :ivar properties: Properties of Databricks. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.DatabricksProperties"] = None, + **kwargs + ): + """ + :keyword properties: Properties of Databricks. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + super(DatabricksSchema, self).__init__(**kwargs) + self.properties = properties + + +class Databricks(Compute, DatabricksSchema): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of Databricks. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + properties: Optional["_models.DatabricksProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword properties: Properties of Databricks. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(Databricks, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + self.properties = properties + self.compute_type = 'Databricks' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class DatabricksComputeSecretsProperties(msrest.serialization.Model): + """Properties of Databricks Compute Secrets. + + :ivar databricks_access_token: access token for databricks account. + :vartype databricks_access_token: str + """ + + _attribute_map = { + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + } + + def __init__( + self, + *, + databricks_access_token: Optional[str] = None, + **kwargs + ): + """ + :keyword databricks_access_token: access token for databricks account. + :paramtype databricks_access_token: str + """ + super(DatabricksComputeSecretsProperties, self).__init__(**kwargs) + self.databricks_access_token = databricks_access_token + + +class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :ivar databricks_access_token: access token for databricks account. + :vartype databricks_access_token: str + :ivar compute_type: Required. The type of compute.Constant filled by server. 
Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + def __init__( + self, + *, + databricks_access_token: Optional[str] = None, + **kwargs + ): + """ + :keyword databricks_access_token: access token for databricks account. + :paramtype databricks_access_token: str + """ + super(DatabricksComputeSecrets, self).__init__(databricks_access_token=databricks_access_token, **kwargs) + self.databricks_access_token = databricks_access_token + self.compute_type = 'Databricks' # type: str + + +class DatabricksProperties(msrest.serialization.Model): + """Properties of Databricks. + + :ivar databricks_access_token: Databricks access token. + :vartype databricks_access_token: str + :ivar workspace_url: Workspace Url. + :vartype workspace_url: str + """ + + _attribute_map = { + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, + } + + def __init__( + self, + *, + databricks_access_token: Optional[str] = None, + workspace_url: Optional[str] = None, + **kwargs + ): + """ + :keyword databricks_access_token: Databricks access token. + :paramtype databricks_access_token: str + :keyword workspace_url: Workspace Url. + :paramtype workspace_url: str + """ + super(DatabricksProperties, self).__init__(**kwargs) + self.databricks_access_token = databricks_access_token + self.workspace_url = workspace_url + + +class DataContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'DataContainerProperties'}, + } + + def __init__( + self, + *, + properties: "_models.DataContainerProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. 
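To make the multiple-inheritance pattern above concrete, a sketch of describing an attached Databricks compute (illustrative only; the ARM resource ID, workspace URL, and access token below are placeholders, and the models are assumed to come from azure.mgmt.machinelearningservices.models):

from azure.mgmt.machinelearningservices import models

databricks_compute = models.Databricks(
    description="Attached Databricks workspace",
    resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Databricks/workspaces/<workspace-name>"            # placeholder ARM ID
    ),
    properties=models.DatabricksProperties(
        workspace_url="https://adb-0000000000000000.0.azuredatabricks.net",  # placeholder
        databricks_access_token="<databricks-pat>",                          # placeholder secret
    ),
    disable_local_auth=False,   # compute_type is fixed to "Databricks" by the subclass
)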
+ :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties + """ + super(DataContainer, self).__init__(**kwargs) + self.properties = properties + + +class DataContainerProperties(AssetContainer): + """Container for data asset versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + :ivar data_type: Required. [Required] Specifies the type of data. Known values are: "uri_file", + "uri_folder", "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + 'data_type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + } + + def __init__( + self, + *, + data_type: Union[str, "_models.DataType"], + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_type: Required. [Required] Specifies the type of data. Known values are: + "uri_file", "uri_folder", "mltable". + :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + """ + super(DataContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + self.data_type = data_type + + +class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of DataContainer entities. + + :ivar next_link: The link to the next page of DataContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type DataContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[DataContainer]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.DataContainer"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of DataContainer objects. 
If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type DataContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] + """ + super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword description: The description of the Machine Learning compute. 
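A short, illustrative sketch of building the DataContainer envelope defined above (assuming the models are imported from azure.mgmt.machinelearningservices.models; the description and tags are placeholders):

from azure.mgmt.machinelearningservices import models

data_container = models.DataContainer(
    properties=models.DataContainerProperties(
        data_type="uri_folder",              # "uri_file", "uri_folder" or "mltable"
        description="Curated training images",
        tags={"team": "vision"},
    ),
)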
+ :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(DataFactory, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.compute_type = 'DataFactory' # type: str + + +class DataLakeAnalyticsSchema(msrest.serialization.Model): + """DataLakeAnalyticsSchema. + + :ivar properties: + :vartype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, + **kwargs + ): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + """ + super(DataLakeAnalyticsSchema, self).__init__(**kwargs) + self.properties = properties + + +class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: + :vartype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(DataLakeAnalytics, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + self.properties = properties + self.compute_type = 'DataLakeAnalytics' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): + """DataLakeAnalyticsSchemaProperties. + + :ivar data_lake_store_account_name: DataLake Store Account Name. + :vartype data_lake_store_account_name: str + """ + + _attribute_map = { + 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + } + + def __init__( + self, + *, + data_lake_store_account_name: Optional[str] = None, + **kwargs + ): + """ + :keyword data_lake_store_account_name: DataLake Store Account Name. + :paramtype data_lake_store_account_name: str + """ + super(DataLakeAnalyticsSchemaProperties, self).__init__(**kwargs) + self.data_lake_store_account_name = data_lake_store_account_name + + +class DataPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a datastore. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant + filled by server. Known values are: "Id", "DataPath", "OutputPath". 
+ :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar datastore_id: ARM resource ID of the datastore where the asset is located. + :vartype datastore_id: str + :ivar path: The path of the file/directory in the datastore. + :vartype path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + datastore_id: Optional[str] = None, + path: Optional[str] = None, + **kwargs + ): + """ + :keyword datastore_id: ARM resource ID of the datastore where the asset is located. + :paramtype datastore_id: str + :keyword path: The path of the file/directory in the datastore. + :paramtype path: str + """ + super(DataPathAssetReference, self).__init__(**kwargs) + self.reference_type = 'DataPath' # type: str + self.datastore_id = datastore_id + self.path = path + + +class DatasetExportSummary(ExportSummary): + """DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_date_time: The time when the export was completed. + :vartype end_date_time: ~datetime.datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar format: Required. [Required] The format of exported labels, also as the + discriminator.Constant filled by server. Known values are: "Dataset", "Coco", "CSV". + :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType + :ivar labeling_job_id: Name and identifier of the job containing exported labels. + :vartype labeling_job_id: str + :ivar start_date_time: The time when the export was requested. + :vartype start_date_time: ~datetime.datetime + :ivar labeled_asset_name: The unique name of the labeled data asset. + :vartype labeled_asset_name: str + """ + + _validation = { + 'end_date_time': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'format': {'required': True}, + 'labeling_job_id': {'readonly': True}, + 'start_date_time': {'readonly': True}, + 'labeled_asset_name': {'readonly': True}, + } + + _attribute_map = { + 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DatasetExportSummary, self).__init__(**kwargs) + self.format = 'Dataset' # type: str + self.labeled_asset_name = None + + +class Datastore(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, + } + + def __init__( + self, + *, + properties: "_models.DatastoreProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties + """ + super(Datastore, self).__init__(**kwargs) + self.properties = properties + + +class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of Datastore entities. + + :ivar next_link: The link to the next page of Datastore objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type Datastore. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Datastore] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Datastore]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.Datastore"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of Datastore objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Datastore. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Datastore] + """ + super(DatastoreResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class DataVersionBase(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'DataVersionBaseProperties'}, + } + + def __init__( + self, + *, + properties: "_models.DataVersionBaseProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties + """ + super(DataVersionBase, self).__init__(**kwargs) + self.properties = properties + + +class DataVersionBaseProperties(AssetBase): + """Data version base definition. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MLTableData, UriFileDataVersion, UriFolderDataVersion. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. + Known values are: "uri_file", "uri_folder", "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :vartype data_uri: str + """ + + _validation = { + 'data_type': {'required': True}, + 'data_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'data_uri': {'key': 'dataUri', 'type': 'str'}, + } + + _subtype_map = { + 'data_type': {'mltable': 'MLTableData', 'uri_file': 'UriFileDataVersion', 'uri_folder': 'UriFolderDataVersion'} + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). 
+ :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :paramtype data_uri: str + """ + super(DataVersionBaseProperties, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + self.data_type = 'DataVersionBaseProperties' # type: str + self.data_uri = data_uri + + +class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of DataVersionBase entities. + + :ivar next_link: The link to the next page of DataVersionBase objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type DataVersionBase. + :vartype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[DataVersionBase]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.DataVersionBase"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of DataVersionBase objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type DataVersionBase. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] + """ + super(DataVersionBaseResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineScaleSettings(msrest.serialization.Model): + """Online deployment scaling configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DefaultScaleSettings, TargetUtilizationScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by + server. Known values are: "Default", "TargetUtilization". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType + """ + + _validation = { + 'scale_type': {'required': True}, + } + + _attribute_map = { + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + } + + _subtype_map = { + 'scale_type': {'Default': 'DefaultScaleSettings', 'TargetUtilization': 'TargetUtilizationScaleSettings'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(OnlineScaleSettings, self).__init__(**kwargs) + self.scale_type = None # type: Optional[str] + + +class DefaultScaleSettings(OnlineScaleSettings): + """DefaultScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by + server. Known values are: "Default", "TargetUtilization". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType + """ + + _validation = { + 'scale_type': {'required': True}, + } + + _attribute_map = { + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DefaultScaleSettings, self).__init__(**kwargs) + self.scale_type = 'Default' # type: str + + +class DeploymentLogs(msrest.serialization.Model): + """DeploymentLogs. 
+ + :ivar content: The retrieved online deployment logs. + :vartype content: str + """ + + _attribute_map = { + 'content': {'key': 'content', 'type': 'str'}, + } + + def __init__( + self, + *, + content: Optional[str] = None, + **kwargs + ): + """ + :keyword content: The retrieved online deployment logs. + :paramtype content: str + """ + super(DeploymentLogs, self).__init__(**kwargs) + self.content = content + + +class DeploymentLogsRequest(msrest.serialization.Model): + """DeploymentLogsRequest. + + :ivar container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer", "InferenceServer". + :vartype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType + :ivar tail: The maximum number of lines to tail. + :vartype tail: int + """ + + _attribute_map = { + 'container_type': {'key': 'containerType', 'type': 'str'}, + 'tail': {'key': 'tail', 'type': 'int'}, + } + + def __init__( + self, + *, + container_type: Optional[Union[str, "_models.ContainerType"]] = None, + tail: Optional[int] = None, + **kwargs + ): + """ + :keyword container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer", "InferenceServer". + :paramtype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType + :keyword tail: The maximum number of lines to tail. + :paramtype tail: int + """ + super(DeploymentLogsRequest, self).__init__(**kwargs) + self.container_type = container_type + self.tail = tail + + +class ResourceConfiguration(msrest.serialization.Model): + """ResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar properties: Additional properties bag. + :vartype properties: dict[str, any] + """ + + _attribute_map = { + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{object}'}, + } + + def __init__( + self, + *, + instance_count: Optional[int] = 1, + instance_type: Optional[str] = None, + properties: Optional[Dict[str, Any]] = None, + **kwargs + ): + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword properties: Additional properties bag. + :paramtype properties: dict[str, any] + """ + super(ResourceConfiguration, self).__init__(**kwargs) + self.instance_count = instance_count + self.instance_type = instance_type + self.properties = properties + + +class DeploymentResourceConfiguration(ResourceConfiguration): + """DeploymentResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar properties: Additional properties bag. 
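A brief sketch of the deployment-side helper models above (illustrative only; assumes azure.mgmt.machinelearningservices.models and uses a placeholder VM size):

from azure.mgmt.machinelearningservices import models

# Request the last 100 log lines from the inference server container.
logs_request = models.DeploymentLogsRequest(
    container_type="InferenceServer",   # or "StorageInitializer"
    tail=100,
)

# Sizing for a deployment; instance_count defaults to 1.
resources = models.DeploymentResourceConfiguration(
    instance_count=2,
    instance_type="Standard_DS3_v2",    # placeholder VM size
)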
+ :vartype properties: dict[str, any] + """ + + _attribute_map = { + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{object}'}, + } + + def __init__( + self, + *, + instance_count: Optional[int] = 1, + instance_type: Optional[str] = None, + properties: Optional[Dict[str, Any]] = None, + **kwargs + ): + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword properties: Additional properties bag. + :paramtype properties: dict[str, any] + """ + super(DeploymentResourceConfiguration, self).__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs) + + +class DiagnoseRequestProperties(msrest.serialization.Model): + """DiagnoseRequestProperties. + + :ivar udr: Setting for diagnosing user defined routing. + :vartype udr: dict[str, any] + :ivar nsg: Setting for diagnosing network security group. + :vartype nsg: dict[str, any] + :ivar resource_lock: Setting for diagnosing resource lock. + :vartype resource_lock: dict[str, any] + :ivar dns_resolution: Setting for diagnosing dns resolution. + :vartype dns_resolution: dict[str, any] + :ivar storage_account: Setting for diagnosing dependent storage account. + :vartype storage_account: dict[str, any] + :ivar key_vault: Setting for diagnosing dependent key vault. + :vartype key_vault: dict[str, any] + :ivar container_registry: Setting for diagnosing dependent container registry. + :vartype container_registry: dict[str, any] + :ivar application_insights: Setting for diagnosing dependent application insights. + :vartype application_insights: dict[str, any] + :ivar others: Setting for diagnosing unclassified category of problems. + :vartype others: dict[str, any] + """ + + _attribute_map = { + 'udr': {'key': 'udr', 'type': '{object}'}, + 'nsg': {'key': 'nsg', 'type': '{object}'}, + 'resource_lock': {'key': 'resourceLock', 'type': '{object}'}, + 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'}, + 'storage_account': {'key': 'storageAccount', 'type': '{object}'}, + 'key_vault': {'key': 'keyVault', 'type': '{object}'}, + 'container_registry': {'key': 'containerRegistry', 'type': '{object}'}, + 'application_insights': {'key': 'applicationInsights', 'type': '{object}'}, + 'others': {'key': 'others', 'type': '{object}'}, + } + + def __init__( + self, + *, + udr: Optional[Dict[str, Any]] = None, + nsg: Optional[Dict[str, Any]] = None, + resource_lock: Optional[Dict[str, Any]] = None, + dns_resolution: Optional[Dict[str, Any]] = None, + storage_account: Optional[Dict[str, Any]] = None, + key_vault: Optional[Dict[str, Any]] = None, + container_registry: Optional[Dict[str, Any]] = None, + application_insights: Optional[Dict[str, Any]] = None, + others: Optional[Dict[str, Any]] = None, + **kwargs + ): + """ + :keyword udr: Setting for diagnosing user defined routing. + :paramtype udr: dict[str, any] + :keyword nsg: Setting for diagnosing network security group. + :paramtype nsg: dict[str, any] + :keyword resource_lock: Setting for diagnosing resource lock. + :paramtype resource_lock: dict[str, any] + :keyword dns_resolution: Setting for diagnosing dns resolution. + :paramtype dns_resolution: dict[str, any] + :keyword storage_account: Setting for diagnosing dependent storage account. 
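And a minimal, illustrative sketch of the diagnose request payload (the per-category bags are loosely typed dict[str, any] and their contents are service-defined, so empty dicts are used here as placeholders):

from azure.mgmt.machinelearningservices import models

diagnose_properties = models.DiagnoseRequestProperties(
    dns_resolution={},
    storage_account={},
    key_vault={},
)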
+ :paramtype storage_account: dict[str, any] + :keyword key_vault: Setting for diagnosing dependent key vault. + :paramtype key_vault: dict[str, any] + :keyword container_registry: Setting for diagnosing dependent container registry. + :paramtype container_registry: dict[str, any] + :keyword application_insights: Setting for diagnosing dependent application insights. + :paramtype application_insights: dict[str, any] + :keyword others: Setting for diagnosing unclassified category of problems. + :paramtype others: dict[str, any] + """ + super(DiagnoseRequestProperties, self).__init__(**kwargs) + self.udr = udr + self.nsg = nsg + self.resource_lock = resource_lock + self.dns_resolution = dns_resolution + self.storage_account = storage_account + self.key_vault = key_vault + self.container_registry = container_registry + self.application_insights = application_insights + self.others = others + + +class DiagnoseResponseResult(msrest.serialization.Model): + """DiagnoseResponseResult. + + :ivar value: + :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'}, + } + + def __init__( + self, + *, + value: Optional["_models.DiagnoseResponseResultValue"] = None, + **kwargs + ): + """ + :keyword value: + :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue + """ + super(DiagnoseResponseResult, self).__init__(**kwargs) + self.value = value + + +class DiagnoseResponseResultValue(msrest.serialization.Model): + """DiagnoseResponseResultValue. + + :ivar user_defined_route_results: + :vartype user_defined_route_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar network_security_rule_results: + :vartype network_security_rule_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar resource_lock_results: + :vartype resource_lock_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar dns_resolution_results: + :vartype dns_resolution_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar storage_account_results: + :vartype storage_account_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar key_vault_results: + :vartype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar container_registry_results: + :vartype container_registry_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar application_insights_results: + :vartype application_insights_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :ivar other_results: + :vartype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + """ + + _attribute_map = { + 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'}, + 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'}, + 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'}, + 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'}, + 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'}, + 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'}, + 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'}, + 'application_insights_results': {'key': 
'applicationInsightsResults', 'type': '[DiagnoseResult]'}, + 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'}, + } + + def __init__( + self, + *, + user_defined_route_results: Optional[List["_models.DiagnoseResult"]] = None, + network_security_rule_results: Optional[List["_models.DiagnoseResult"]] = None, + resource_lock_results: Optional[List["_models.DiagnoseResult"]] = None, + dns_resolution_results: Optional[List["_models.DiagnoseResult"]] = None, + storage_account_results: Optional[List["_models.DiagnoseResult"]] = None, + key_vault_results: Optional[List["_models.DiagnoseResult"]] = None, + container_registry_results: Optional[List["_models.DiagnoseResult"]] = None, + application_insights_results: Optional[List["_models.DiagnoseResult"]] = None, + other_results: Optional[List["_models.DiagnoseResult"]] = None, + **kwargs + ): + """ + :keyword user_defined_route_results: + :paramtype user_defined_route_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword network_security_rule_results: + :paramtype network_security_rule_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword resource_lock_results: + :paramtype resource_lock_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword dns_resolution_results: + :paramtype dns_resolution_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword storage_account_results: + :paramtype storage_account_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword key_vault_results: + :paramtype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword container_registry_results: + :paramtype container_registry_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword application_insights_results: + :paramtype application_insights_results: + list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + :keyword other_results: + :paramtype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] + """ + super(DiagnoseResponseResultValue, self).__init__(**kwargs) + self.user_defined_route_results = user_defined_route_results + self.network_security_rule_results = network_security_rule_results + self.resource_lock_results = resource_lock_results + self.dns_resolution_results = dns_resolution_results + self.storage_account_results = storage_account_results + self.key_vault_results = key_vault_results + self.container_registry_results = container_registry_results + self.application_insights_results = application_insights_results + self.other_results = other_results + + +class DiagnoseResult(msrest.serialization.Model): + """Result of Diagnose. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Code for workspace setup error. + :vartype code: str + :ivar level: Level of workspace setup error. Known values are: "Warning", "Error", + "Information". + :vartype level: str or ~azure.mgmt.machinelearningservices.models.DiagnoseResultLevel + :ivar message: Message of workspace setup error. 
+ :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'level': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DiagnoseResult, self).__init__(**kwargs) + self.code = None + self.level = None + self.message = None + + +class DiagnoseWorkspaceParameters(msrest.serialization.Model): + """Parameters to diagnose a workspace. + + :ivar value: Value of Parameters. + :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'}, + } + + def __init__( + self, + *, + value: Optional["_models.DiagnoseRequestProperties"] = None, + **kwargs + ): + """ + :keyword value: Value of Parameters. + :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties + """ + super(DiagnoseWorkspaceParameters, self).__init__(**kwargs) + self.value = value + + +class DistributionConfiguration(msrest.serialization.Model): + """Base definition for job distribution configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Mpi, PyTorch, TensorFlow. + + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: Required. [Required] Specifies the type of distribution + framework.Constant filled by server. Known values are: "PyTorch", "TensorFlow", "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + } + + _subtype_map = { + 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(DistributionConfiguration, self).__init__(**kwargs) + self.distribution_type = None # type: Optional[str] + + +class Docker(msrest.serialization.Model): + """Docker. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar privileged: Indicate whether container shall run in privileged or non-privileged mode. + :vartype privileged: bool + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'privileged': {'key': 'privileged', 'type': 'bool'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + privileged: Optional[bool] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword privileged: Indicate whether container shall run in privileged or non-privileged mode. + :paramtype privileged: bool + """ + super(Docker, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.privileged = privileged + + +class EncryptionKeyVaultProperties(msrest.serialization.Model): + """EncryptionKeyVaultProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned encryption + key is present. 
+ :vartype key_vault_arm_id: str + :ivar key_identifier: Required. Key vault uri to access the encryption key. + :vartype key_identifier: str + :ivar identity_client_id: For future use - The client id of the identity which will be used to + access key vault. + :vartype identity_client_id: str + """ + + _validation = { + 'key_vault_arm_id': {'required': True}, + 'key_identifier': {'required': True}, + } + + _attribute_map = { + 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + } + + def __init__( + self, + *, + key_vault_arm_id: str, + key_identifier: str, + identity_client_id: Optional[str] = None, + **kwargs + ): + """ + :keyword key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned + encryption key is present. + :paramtype key_vault_arm_id: str + :keyword key_identifier: Required. Key vault uri to access the encryption key. + :paramtype key_identifier: str + :keyword identity_client_id: For future use - The client id of the identity which will be used + to access key vault. + :paramtype identity_client_id: str + """ + super(EncryptionKeyVaultProperties, self).__init__(**kwargs) + self.key_vault_arm_id = key_vault_arm_id + self.key_identifier = key_identifier + self.identity_client_id = identity_client_id + + +class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): + """EncryptionKeyVaultUpdateProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_identifier: Required. Key Vault uri to access the encryption key. + :vartype key_identifier: str + """ + + _validation = { + 'key_identifier': {'required': True}, + } + + _attribute_map = { + 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + } + + def __init__( + self, + *, + key_identifier: str, + **kwargs + ): + """ + :keyword key_identifier: Required. Key Vault uri to access the encryption key. + :paramtype key_identifier: str + """ + super(EncryptionKeyVaultUpdateProperties, self).__init__(**kwargs) + self.key_identifier = key_identifier + + +class EncryptionProperty(msrest.serialization.Model): + """EncryptionProperty. + + All required parameters must be populated in order to send to Azure. + + :ivar status: Required. Indicates whether or not the encryption is enabled for the workspace. + Known values are: "Enabled", "Disabled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :ivar identity: The identity that will be used to access the key vault for encryption at rest. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :ivar key_vault_properties: Required. Customer Key vault properties. + :vartype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + """ + + _validation = { + 'status': {'required': True}, + 'key_vault_properties': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityForCmk'}, + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionKeyVaultProperties'}, + } + + def __init__( + self, + *, + status: Union[str, "_models.EncryptionStatus"], + key_vault_properties: "_models.EncryptionKeyVaultProperties", + identity: Optional["_models.IdentityForCmk"] = None, + **kwargs + ): + """ + :keyword status: Required. Indicates whether or not the encryption is enabled for the + workspace. 
Known values are: "Enabled", "Disabled". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :keyword identity: The identity that will be used to access the key vault for encryption at + rest. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :keyword key_vault_properties: Required. Customer Key vault properties. + :paramtype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultProperties + """ + super(EncryptionProperty, self).__init__(**kwargs) + self.status = status + self.identity = identity + self.key_vault_properties = key_vault_properties + + +class EncryptionUpdateProperties(msrest.serialization.Model): + """EncryptionUpdateProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar key_vault_properties: Required. Customer Key vault properties. + :vartype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + + _validation = { + 'key_vault_properties': {'required': True}, + } + + _attribute_map = { + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionKeyVaultUpdateProperties'}, + } + + def __init__( + self, + *, + key_vault_properties: "_models.EncryptionKeyVaultUpdateProperties", + **kwargs + ): + """ + :keyword key_vault_properties: Required. Customer Key vault properties. + :paramtype key_vault_properties: + ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties + """ + super(EncryptionUpdateProperties, self).__init__(**kwargs) + self.key_vault_properties = key_vault_properties + + +class Endpoint(msrest.serialization.Model): + """Endpoint. + + :ivar protocol: Protocol over which communication will happen over this endpoint. Known values + are: "tcp", "udp", "http". Default value: "tcp". + :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol + :ivar name: Name of the Endpoint. + :vartype name: str + :ivar target: Application port inside the container. + :vartype target: int + :ivar published: Port over which the application is exposed from container. + :vartype published: int + :ivar host_ip: Host IP over which the application is exposed from the container. + :vartype host_ip: str + """ + + _attribute_map = { + 'protocol': {'key': 'protocol', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'int'}, + 'published': {'key': 'published', 'type': 'int'}, + 'host_ip': {'key': 'hostIp', 'type': 'str'}, + } + + def __init__( + self, + *, + protocol: Optional[Union[str, "_models.Protocol"]] = "tcp", + name: Optional[str] = None, + target: Optional[int] = None, + published: Optional[int] = None, + host_ip: Optional[str] = None, + **kwargs + ): + """ + :keyword protocol: Protocol over which communication will happen over this endpoint. Known + values are: "tcp", "udp", "http". Default value: "tcp". + :paramtype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol + :keyword name: Name of the Endpoint. + :paramtype name: str + :keyword target: Application port inside the container. + :paramtype target: int + :keyword published: Port over which the application is exposed from container. + :paramtype published: int + :keyword host_ip: Host IP over which the application is exposed from the container. 
+ :paramtype host_ip: str + """ + super(Endpoint, self).__init__(**kwargs) + self.protocol = protocol + self.name = name + self.target = target + self.published = published + self.host_ip = host_ip + + +class EndpointAuthKeys(msrest.serialization.Model): + """Keys for endpoint authentication. + + :ivar primary_key: The primary key. + :vartype primary_key: str + :ivar secondary_key: The secondary key. + :vartype secondary_key: str + """ + + _attribute_map = { + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + } + + def __init__( + self, + *, + primary_key: Optional[str] = None, + secondary_key: Optional[str] = None, + **kwargs + ): + """ + :keyword primary_key: The primary key. + :paramtype primary_key: str + :keyword secondary_key: The secondary key. + :paramtype secondary_key: str + """ + super(EndpointAuthKeys, self).__init__(**kwargs) + self.primary_key = primary_key + self.secondary_key = secondary_key + + +class EndpointAuthToken(msrest.serialization.Model): + """Service Token. + + :ivar access_token: Access token for endpoint authentication. + :vartype access_token: str + :ivar expiry_time_utc: Access token expiry time (UTC). + :vartype expiry_time_utc: long + :ivar refresh_after_time_utc: Refresh access token after time (UTC). + :vartype refresh_after_time_utc: long + :ivar token_type: Access token type. + :vartype token_type: str + """ + + _attribute_map = { + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'}, + 'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'}, + 'token_type': {'key': 'tokenType', 'type': 'str'}, + } + + def __init__( + self, + *, + access_token: Optional[str] = None, + expiry_time_utc: Optional[int] = 0, + refresh_after_time_utc: Optional[int] = 0, + token_type: Optional[str] = None, + **kwargs + ): + """ + :keyword access_token: Access token for endpoint authentication. + :paramtype access_token: str + :keyword expiry_time_utc: Access token expiry time (UTC). + :paramtype expiry_time_utc: long + :keyword refresh_after_time_utc: Refresh access token after time (UTC). + :paramtype refresh_after_time_utc: long + :keyword token_type: Access token type. + :paramtype token_type: str + """ + super(EndpointAuthToken, self).__init__(**kwargs) + self.access_token = access_token + self.expiry_time_utc = expiry_time_utc + self.refresh_after_time_utc = refresh_after_time_utc + self.token_type = token_type + + +class ScheduleActionBase(msrest.serialization.Model): + """ScheduleActionBase. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: JobScheduleAction, EndpointScheduleAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant + filled by server. Known values are: "CreateJob", "InvokeBatchEndpoint". 
+ :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + """ + + _validation = { + 'action_type': {'required': True}, + } + + _attribute_map = { + 'action_type': {'key': 'actionType', 'type': 'str'}, + } + + _subtype_map = { + 'action_type': {'CreateJob': 'JobScheduleAction', 'InvokeBatchEndpoint': 'EndpointScheduleAction'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ScheduleActionBase, self).__init__(**kwargs) + self.action_type = None # type: Optional[str] + + +class EndpointScheduleAction(ScheduleActionBase): + """EndpointScheduleAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant + filled by server. Known values are: "CreateJob", "InvokeBatchEndpoint". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar endpoint_invocation_definition: Required. [Required] Defines Schedule action definition + details. + + + .. raw:: html + + . + :vartype endpoint_invocation_definition: any + """ + + _validation = { + 'action_type': {'required': True}, + 'endpoint_invocation_definition': {'required': True}, + } + + _attribute_map = { + 'action_type': {'key': 'actionType', 'type': 'str'}, + 'endpoint_invocation_definition': {'key': 'endpointInvocationDefinition', 'type': 'object'}, + } + + def __init__( + self, + *, + endpoint_invocation_definition: Any, + **kwargs + ): + """ + :keyword endpoint_invocation_definition: Required. [Required] Defines Schedule action + definition details. + + + .. raw:: html + + . + :paramtype endpoint_invocation_definition: any + """ + super(EndpointScheduleAction, self).__init__(**kwargs) + self.action_type = 'InvokeBatchEndpoint' # type: str + self.endpoint_invocation_definition = endpoint_invocation_definition + + +class EnvironmentContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'EnvironmentContainerProperties'}, + } + + def __init__( + self, + *, + properties: "_models.EnvironmentContainerProperties", + **kwargs + ): + """ + :keyword properties: Required. 
[Required] Additional attributes of the entity. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties + """ + super(EnvironmentContainer, self).__init__(**kwargs) + self.properties = properties + + +class EnvironmentContainerProperties(AssetContainer): + """Container for environment specification versions. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super(EnvironmentContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + + +class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of EnvironmentContainer entities. + + :ivar next_link: The link to the next page of EnvironmentContainer objects. If null, there are + no additional pages. + :vartype next_link: str + :ivar value: An array of objects of type EnvironmentContainer. + :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[EnvironmentContainer]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.EnvironmentContainer"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of EnvironmentContainer objects. If null, there + are no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type EnvironmentContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + """ + super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class EnvironmentVariable(msrest.serialization.Model): + """EnvironmentVariable. 
+ + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Type of the Environment Variable. Possible values are: local - For local variable. + Known values are: "local". Default value: "local". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType + :ivar value: Value of the Environment variable. + :vartype value: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + type: Optional[Union[str, "_models.EnvironmentVariableType"]] = "local", + value: Optional[str] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Type of the Environment Variable. Possible values are: local - For local + variable. Known values are: "local". Default value: "local". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType + :keyword value: Value of the Environment variable. + :paramtype value: str + """ + super(EnvironmentVariable, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.value = value + + +class EnvironmentVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'EnvironmentVersionProperties'}, + } + + def __init__( + self, + *, + properties: "_models.EnvironmentVersionProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties + """ + super(EnvironmentVersion, self).__init__(**kwargs) + self.properties = properties + + +class EnvironmentVersionProperties(AssetBase): + """Environment version details. 
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar description: The asset description text.
+ :vartype description: str
+ :ivar properties: The asset property dictionary.
+ :vartype properties: dict[str, str]
+ :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :vartype tags: dict[str, str]
+ :ivar is_anonymous: If the name and version are system generated (anonymous registration).
+ :vartype is_anonymous: bool
+ :ivar is_archived: Is the asset archived?.
+ :vartype is_archived: bool
+ :ivar build: Configuration settings for Docker build context.
+ :vartype build: ~azure.mgmt.machinelearningservices.models.BuildContext
+ :ivar conda_file: Standard configuration file used by Conda that lets you install any kind of
+ package, including Python, R, and C/C++ packages.
+
+
+ .. raw:: html
+
+ .
+ :vartype conda_file: str
+ :ivar environment_type: Environment type is either user managed or curated by the Azure ML
+ service
+
+
+ .. raw:: html
+
+ . Known values are: "Curated", "UserCreated".
+ :vartype environment_type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentType
+ :ivar image: Name of the image that will be used for the environment.
+
+
+ .. raw:: html
+
+ .
+ :vartype image: str
+ :ivar inference_config: Defines configuration specific to inference.
+ :vartype inference_config:
+ ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties
+ :ivar os_type: The OS type of the environment. Known values are: "Linux", "Windows".
+ :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType
+ """
+
+ _validation = {
+ 'environment_type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
+ 'is_archived': {'key': 'isArchived', 'type': 'bool'},
+ 'build': {'key': 'build', 'type': 'BuildContext'},
+ 'conda_file': {'key': 'condaFile', 'type': 'str'},
+ 'environment_type': {'key': 'environmentType', 'type': 'str'},
+ 'image': {'key': 'image', 'type': 'str'},
+ 'inference_config': {'key': 'inferenceConfig', 'type': 'InferenceContainerProperties'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, str]] = None,
+ tags: Optional[Dict[str, str]] = None,
+ is_anonymous: Optional[bool] = False,
+ is_archived: Optional[bool] = False,
+ build: Optional["_models.BuildContext"] = None,
+ conda_file: Optional[str] = None,
+ image: Optional[str] = None,
+ inference_config: Optional["_models.InferenceContainerProperties"] = None,
+ os_type: Optional[Union[str, "_models.OperatingSystemType"]] = None,
+ **kwargs
+ ):
+ """
+ :keyword description: The asset description text.
+ :paramtype description: str
+ :keyword properties: The asset property dictionary.
+ :paramtype properties: dict[str, str]
+ :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated.
+ :paramtype tags: dict[str, str]
+ :keyword is_anonymous: If the name and version are system generated (anonymous registration).
+ :paramtype is_anonymous: bool
+ :keyword is_archived: Is the asset archived?.
+ :paramtype is_archived: bool
+ :keyword build: Configuration settings for Docker build context.
+ :paramtype build: ~azure.mgmt.machinelearningservices.models.BuildContext + :keyword conda_file: Standard configuration file used by Conda that lets you install any kind + of package, including Python, R, and C/C++ packages. + + + .. raw:: html + + . + :paramtype conda_file: str + :keyword image: Name of the image that will be used for the environment. + + + .. raw:: html + + . + :paramtype image: str + :keyword inference_config: Defines configuration specific to inference. + :paramtype inference_config: + ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :keyword os_type: The OS type of the environment. Known values are: "Linux", "Windows". + :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType + """ + super(EnvironmentVersionProperties, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + self.build = build + self.conda_file = conda_file + self.environment_type = None + self.image = image + self.inference_config = inference_config + self.os_type = os_type + + +class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of EnvironmentVersion entities. + + :ivar next_link: The link to the next page of EnvironmentVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type EnvironmentVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[EnvironmentVersion]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.EnvironmentVersion"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of EnvironmentVersion objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type EnvironmentVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + """ + super(EnvironmentVersionResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(msrest.serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ErrorDetail, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(msrest.serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + + :ivar error: The error object. + :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + **kwargs + ): + """ + :keyword error: The error object. + :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail + """ + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class EstimatedVMPrice(msrest.serialization.Model): + """The estimated price info for using a VM of a particular OS type, tier, etc. + + All required parameters must be populated in order to send to Azure. + + :ivar retail_price: Required. The price charged for using the VM. + :vartype retail_price: float + :ivar os_type: Required. Operating system type used by the VM. Known values are: "Linux", + "Windows". + :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType + :ivar vm_tier: Required. The type of the VM. Known values are: "Standard", "LowPriority", + "Spot". + :vartype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + """ + + _validation = { + 'retail_price': {'required': True}, + 'os_type': {'required': True}, + 'vm_tier': {'required': True}, + } + + _attribute_map = { + 'retail_price': {'key': 'retailPrice', 'type': 'float'}, + 'os_type': {'key': 'osType', 'type': 'str'}, + 'vm_tier': {'key': 'vmTier', 'type': 'str'}, + } + + def __init__( + self, + *, + retail_price: float, + os_type: Union[str, "_models.VMPriceOSType"], + vm_tier: Union[str, "_models.VMTier"], + **kwargs + ): + """ + :keyword retail_price: Required. The price charged for using the VM. + :paramtype retail_price: float + :keyword os_type: Required. Operating system type used by the VM. Known values are: "Linux", + "Windows". + :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType + :keyword vm_tier: Required. The type of the VM. Known values are: "Standard", "LowPriority", + "Spot". + :paramtype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + """ + super(EstimatedVMPrice, self).__init__(**kwargs) + self.retail_price = retail_price + self.os_type = os_type + self.vm_tier = vm_tier + + +class EstimatedVMPrices(msrest.serialization.Model): + """The estimated price info for using a VM. + + All required parameters must be populated in order to send to Azure. + + :ivar billing_currency: Required. 
Three lettered code specifying the currency of the VM price. + Example: USD. Known values are: "USD". + :vartype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency + :ivar unit_of_measure: Required. The unit of time measurement for the specified VM price. + Example: OneHour. Known values are: "OneHour". + :vartype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure + :ivar values: Required. The list of estimated prices for using a VM of a particular OS type, + tier, etc. + :vartype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + """ + + _validation = { + 'billing_currency': {'required': True}, + 'unit_of_measure': {'required': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, + 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + } + + def __init__( + self, + *, + billing_currency: Union[str, "_models.BillingCurrency"], + unit_of_measure: Union[str, "_models.UnitOfMeasure"], + values: List["_models.EstimatedVMPrice"], + **kwargs + ): + """ + :keyword billing_currency: Required. Three lettered code specifying the currency of the VM + price. Example: USD. Known values are: "USD". + :paramtype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency + :keyword unit_of_measure: Required. The unit of time measurement for the specified VM price. + Example: OneHour. Known values are: "OneHour". + :paramtype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure + :keyword values: Required. The list of estimated prices for using a VM of a particular OS type, + tier, etc. + :paramtype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + """ + super(EstimatedVMPrices, self).__init__(**kwargs) + self.billing_currency = billing_currency + self.unit_of_measure = unit_of_measure + self.values = values + + +class ExternalFQDNResponse(msrest.serialization.Model): + """ExternalFQDNResponse. + + :ivar value: + :vartype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FQDNEndpoints]'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.FQDNEndpoints"]] = None, + **kwargs + ): + """ + :keyword value: + :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoints] + """ + super(ExternalFQDNResponse, self).__init__(**kwargs) + self.value = value + + +class FeaturizationSettings(msrest.serialization.Model): + """Featurization Configuration. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + """ + + _attribute_map = { + 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + } + + def __init__( + self, + *, + dataset_language: Optional[str] = None, + **kwargs + ): + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + """ + super(FeaturizationSettings, self).__init__(**kwargs) + self.dataset_language = dataset_language + + +class FlavorData(msrest.serialization.Model): + """FlavorData. + + :ivar data: Model flavor-specific data. 
+ :vartype data: dict[str, str] + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': '{str}'}, + } + + def __init__( + self, + *, + data: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword data: Model flavor-specific data. + :paramtype data: dict[str, str] + """ + super(FlavorData, self).__init__(**kwargs) + self.data = data + + +class Forecasting(AutoMLVertical, TableVertical): + """Forecasting task in AutoML Table vertical. + + All required parameters must be populated in order to send to Azure. + + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar forecasting_settings: Forecasting task specific inputs. + :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :ivar primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + "NormalizedMeanAbsoluteError". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. + :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings + """ + + _validation = { + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, + 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, + 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, + 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + 'training_settings': {'key': 'trainingSettings', 'type': 'ForecastingTrainingSettings'}, + } + + def __init__( + self, + *, + training_data: "_models.MLTableJobInput", + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + forecasting_settings: Optional["_models.ForecastingSettings"] = None, + primary_metric: Optional[Union[str, "_models.ForecastingPrimaryMetrics"]] = None, + training_settings: Optional["_models.ForecastingTrainingSettings"] = None, + **kwargs + ): + """ + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. 
+ Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword forecasting_settings: Forecasting task specific inputs. + :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings + :keyword primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + "NormalizedMeanAbsoluteError". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. + :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings + """ + super(Forecasting, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, limit_settings=limit_settings, n_cross_validations=n_cross_validations, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + self.task_type = 'Forecasting' # type: str + self.forecasting_settings = forecasting_settings + self.primary_metric = primary_metric + self.training_settings = training_settings + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class ForecastingSettings(msrest.serialization.Model): + """Forecasting specific parameters. + + :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks. + These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. + :vartype country_or_region_for_holidays: str + :ivar cv_step_size: Number of periods between the origin time of one CV fold and the next fold. 
+ For
+ example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be
+ three days apart.
+ :vartype cv_step_size: int
+ :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null.
+ Known values are: "None", "Auto".
+ :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags
+ :ivar forecast_horizon: The desired maximum forecast horizon in units of time-series frequency.
+ :vartype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon
+ :ivar frequency: When forecasting, this parameter represents the period with which the forecast
+ is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset frequency
+ by default.
+ :vartype frequency: str
+ :ivar seasonality: Set time series seasonality as an integer multiple of the series frequency.
+ If seasonality is set to 'auto', it will be inferred.
+ :vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality
+ :ivar short_series_handling_config: The parameter defining how AutoML should handle short
+ time series. Known values are: "None", "Auto", "Pad", "Drop".
+ :vartype short_series_handling_config: str or
+ ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration
+ :ivar target_aggregate_function: The function to be used to aggregate the time series target
+ column to conform to a user specified frequency.
+ If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the
+ error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean".
+ Known values are: "None", "Sum", "Max", "Min", "Mean".
+ :vartype target_aggregate_function: str or
+ ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction
+ :ivar target_lags: The number of past periods to lag from the target column.
+ :vartype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags
+ :ivar target_rolling_window_size: The number of past periods used to create a rolling window
+ average of the target column.
+ :vartype target_rolling_window_size:
+ ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize
+ :ivar time_column_name: The name of the time column. This parameter is required when
+ forecasting to specify the datetime column in the input data used for building the time series
+ and inferring its frequency.
+ :vartype time_column_name: str
+ :ivar time_series_id_column_names: The names of columns used to group a timeseries. It can be
+ used to create multiple series.
+ If grain is not defined, the data set is assumed to be one time-series. This parameter is used
+ with task type forecasting.
+ :vartype time_series_id_column_names: list[str]
+ :ivar use_stl: Configure STL Decomposition of the time-series target column. Known values are:
+ "None", "Season", "SeasonTrend".
+ :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl
+ """
+
+ _attribute_map = {
+ 'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'},
+ 'cv_step_size': {'key': 'cvStepSize', 'type': 'int'},
+ 'feature_lags': {'key': 'featureLags', 'type': 'str'},
+ 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'},
+ 'frequency': {'key': 'frequency', 'type': 'str'},
+ 'seasonality': {'key': 'seasonality', 'type': 'Seasonality'},
+ 'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'},
+ 'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'},
+ 'target_lags': {'key': 'targetLags', 'type': 'TargetLags'},
+ 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'},
+ 'time_column_name': {'key': 'timeColumnName', 'type': 'str'},
+ 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'},
+ 'use_stl': {'key': 'useStl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ country_or_region_for_holidays: Optional[str] = None,
+ cv_step_size: Optional[int] = None,
+ feature_lags: Optional[Union[str, "_models.FeatureLags"]] = None,
+ forecast_horizon: Optional["_models.ForecastHorizon"] = None,
+ frequency: Optional[str] = None,
+ seasonality: Optional["_models.Seasonality"] = None,
+ short_series_handling_config: Optional[Union[str, "_models.ShortSeriesHandlingConfiguration"]] = None,
+ target_aggregate_function: Optional[Union[str, "_models.TargetAggregationFunction"]] = None,
+ target_lags: Optional["_models.TargetLags"] = None,
+ target_rolling_window_size: Optional["_models.TargetRollingWindowSize"] = None,
+ time_column_name: Optional[str] = None,
+ time_series_id_column_names: Optional[List[str]] = None,
+ use_stl: Optional[Union[str, "_models.UseStl"]] = None,
+ **kwargs
+ ):
+ """
+ :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks.
+ These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'.
+ :paramtype country_or_region_for_holidays: str
+ :keyword cv_step_size: Number of periods between the origin time of one CV fold and the next
+ fold. For
+ example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be
+ three days apart.
+ :paramtype cv_step_size: int
+ :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null.
+ Known values are: "None", "Auto".
+ :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags
+ :keyword forecast_horizon: The desired maximum forecast horizon in units of time-series
+ frequency.
+ :paramtype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon
+ :keyword frequency: When forecasting, this parameter represents the period with which the
+ forecast is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset
+ frequency by default.
+ :paramtype frequency: str
+ :keyword seasonality: Set time series seasonality as an integer multiple of the series
+ frequency.
+ If seasonality is set to 'auto', it will be inferred.
+ :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality
+ :keyword short_series_handling_config: The parameter defining how AutoML should handle short
+ time series. Known values are: "None", "Auto", "Pad", "Drop".
+ :paramtype short_series_handling_config: str or + ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration + :keyword target_aggregate_function: The function to be used to aggregate the time series target + column to conform to a user specified frequency. + If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the + error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". + Known values are: "None", "Sum", "Max", "Min", "Mean". + :paramtype target_aggregate_function: str or + ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction + :keyword target_lags: The number of past periods to lag from the target column. + :paramtype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags + :keyword target_rolling_window_size: The number of past periods used to create a rolling window + average of the target column. + :paramtype target_rolling_window_size: + ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize + :keyword time_column_name: The name of the time column. This parameter is required when + forecasting to specify the datetime column in the input data used for building the time series + and inferring its frequency. + :paramtype time_column_name: str + :keyword time_series_id_column_names: The names of columns used to group a timeseries. It can + be used to create multiple series. + If grain is not defined, the data set is assumed to be one time-series. This parameter is used + with task type forecasting. + :paramtype time_series_id_column_names: list[str] + :keyword use_stl: Configure STL Decomposition of the time-series target column. Known values + are: "None", "Season", "SeasonTrend". + :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl + """ + super(ForecastingSettings, self).__init__(**kwargs) + self.country_or_region_for_holidays = country_or_region_for_holidays + self.cv_step_size = cv_step_size + self.feature_lags = feature_lags + self.forecast_horizon = forecast_horizon + self.frequency = frequency + self.seasonality = seasonality + self.short_series_handling_config = short_series_handling_config + self.target_aggregate_function = target_aggregate_function + self.target_lags = target_lags + self.target_rolling_window_size = target_rolling_window_size + self.time_column_name = time_column_name + self.time_series_id_column_names = time_series_id_column_names + self.use_stl = use_stl + + +class ForecastingTrainingSettings(TrainingSettings): + """Forecasting Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. 
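As a hedged illustration of how the ForecastingSettings model above is meant to be populated, the sketch below uses only the scalar parameters documented in its docstring; the column names and values are hypothetical, not taken from the generated code.

from azure.mgmt.machinelearningservices import models

# Minimal sketch: forecasting settings for a daily, multi-series dataset.
forecasting_settings = models.ForecastingSettings(
    time_column_name="date",                   # datetime column used to build the time series (hypothetical name)
    country_or_region_for_holidays="US",       # ISO 3166 two-letter code
    cv_step_size=3,                            # CV fold origin times three periods apart
    time_series_id_column_names=["store_id"],  # grain columns; omit for a single time series
    use_stl="Season",                          # one of the documented known values: "None", "Season", "SeasonTrend"
)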
+ :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for forecasting task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :ivar blocked_training_algorithms: Blocked models for forecasting task. + :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + """ + + _attribute_map = { + 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, + 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, + 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, + 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, + 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, + 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, + 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, + 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, + 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + } + + def __init__( + self, + *, + enable_dnn_training: Optional[bool] = False, + enable_model_explainability: Optional[bool] = True, + enable_onnx_compatible_models: Optional[bool] = False, + enable_stack_ensemble: Optional[bool] = True, + enable_vote_ensemble: Optional[bool] = True, + ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + **kwargs + ): + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for forecasting task. + :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + :keyword blocked_training_algorithms: Blocked models for forecasting task. 
+ :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.ForecastingModels] + """ + super(ForecastingTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, **kwargs) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms + + +class FQDNEndpoint(msrest.serialization.Model): + """FQDNEndpoint. + + :ivar domain_name: + :vartype domain_name: str + :ivar endpoint_details: + :vartype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + """ + + _attribute_map = { + 'domain_name': {'key': 'domainName', 'type': 'str'}, + 'endpoint_details': {'key': 'endpointDetails', 'type': '[FQDNEndpointDetail]'}, + } + + def __init__( + self, + *, + domain_name: Optional[str] = None, + endpoint_details: Optional[List["_models.FQDNEndpointDetail"]] = None, + **kwargs + ): + """ + :keyword domain_name: + :paramtype domain_name: str + :keyword endpoint_details: + :paramtype endpoint_details: + list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] + """ + super(FQDNEndpoint, self).__init__(**kwargs) + self.domain_name = domain_name + self.endpoint_details = endpoint_details + + +class FQDNEndpointDetail(msrest.serialization.Model): + """FQDNEndpointDetail. + + :ivar port: + :vartype port: int + """ + + _attribute_map = { + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__( + self, + *, + port: Optional[int] = None, + **kwargs + ): + """ + :keyword port: + :paramtype port: int + """ + super(FQDNEndpointDetail, self).__init__(**kwargs) + self.port = port + + +class FQDNEndpoints(msrest.serialization.Model): + """FQDNEndpoints. + + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'FQDNEndpointsProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.FQDNEndpointsProperties"] = None, + **kwargs + ): + """ + :keyword properties: + :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpointsProperties + """ + super(FQDNEndpoints, self).__init__(**kwargs) + self.properties = properties + + +class FQDNEndpointsProperties(msrest.serialization.Model): + """FQDNEndpointsProperties. + + :ivar category: + :vartype category: str + :ivar endpoints: + :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + + _attribute_map = { + 'category': {'key': 'category', 'type': 'str'}, + 'endpoints': {'key': 'endpoints', 'type': '[FQDNEndpoint]'}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + endpoints: Optional[List["_models.FQDNEndpoint"]] = None, + **kwargs + ): + """ + :keyword category: + :paramtype category: str + :keyword endpoints: + :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] + """ + super(FQDNEndpointsProperties, self).__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class GridSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that exhaustively generates every value combination in the space. 
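A hedged sketch of the ForecastingTrainingSettings model defined above; the blocked model name is hypothetical and stands in for any ForecastingModels value.

import datetime

from azure.mgmt.machinelearningservices import models

# Minimal sketch: forecasting training configuration with a longer ensemble download timeout.
training_settings = models.ForecastingTrainingSettings(
    enable_dnn_training=True,                                        # also consider DNN models
    enable_vote_ensemble=True,
    ensemble_model_download_timeout=datetime.timedelta(minutes=10),  # raise above the 300 s default if needed
    blocked_training_algorithms=["Prophet"],                         # hypothetical blocked model name
)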
+ + All required parameters must be populated in order to send to Azure. + + :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating + hyperparameter values, along with configuration properties.Constant filled by server. Known + values are: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + 'sampling_algorithm_type': {'required': True}, + } + + _attribute_map = { + 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(GridSamplingAlgorithm, self).__init__(**kwargs) + self.sampling_algorithm_type = 'Grid' # type: str + + +class HdfsDatastore(DatastoreProperties): + """HdfsDatastore. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar credentials: Required. [Required] Account credentials. + :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled + by server. Known values are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", + "AzureFile", "Hdfs". + :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType + :ivar is_default: Readonly property to indicate if datastore is the workspace default + datastore. + :vartype is_default: bool + :ivar hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :vartype hdfs_server_certificate: str + :ivar name_node_address: Required. [Required] IP Address or DNS HostName. + :vartype name_node_address: str + :ivar protocol: Protocol used to communicate with the storage account (Https/Http). + :vartype protocol: str + """ + + _validation = { + 'credentials': {'required': True}, + 'datastore_type': {'required': True}, + 'is_default': {'readonly': True}, + 'name_node_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'hdfs_server_certificate': {'key': 'hdfsServerCertificate', 'type': 'str'}, + 'name_node_address': {'key': 'nameNodeAddress', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, + } + + def __init__( + self, + *, + credentials: "_models.DatastoreCredentials", + name_node_address: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + hdfs_server_certificate: Optional[str] = None, + protocol: Optional[str] = "http", + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. 
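GridSamplingAlgorithm carries no settings of its own; a minimal sketch, just to show that the discriminator is filled in by the model rather than by the caller:

from azure.mgmt.machinelearningservices import models

# The grid algorithm exhaustively enumerates the search space; no extra configuration is accepted.
sampling = models.GridSamplingAlgorithm()
assert sampling.sampling_algorithm_type == "Grid"  # constant filled by the model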
+ :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword credentials: Required. [Required] Account credentials. + :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :keyword hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded + string. Required if "Https" protocol is selected. + :paramtype hdfs_server_certificate: str + :keyword name_node_address: Required. [Required] IP Address or DNS HostName. + :paramtype name_node_address: str + :keyword protocol: Protocol used to communicate with the storage account (Https/Http). + :paramtype protocol: str + """ + super(HdfsDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, **kwargs) + self.datastore_type = 'Hdfs' # type: str + self.hdfs_server_certificate = hdfs_server_certificate + self.name_node_address = name_node_address + self.protocol = protocol + + +class HDInsightSchema(msrest.serialization.Model): + """HDInsightSchema. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.HDInsightProperties"] = None, + **kwargs + ): + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + super(HDInsightSchema, self).__init__(**kwargs) + self.properties = properties + + +class HDInsight(Compute, HDInsightSchema): + """A HDInsight compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: HDInsight compute properties. + :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. 
+ :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. + :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + properties: Optional["_models.HDInsightProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword properties: HDInsight compute properties. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(HDInsight, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + self.properties = properties + self.compute_type = 'HDInsight' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class HDInsightProperties(msrest.serialization.Model): + """HDInsight compute properties. + + :ivar ssh_port: Port open for ssh connections on the master node of the cluster. + :vartype ssh_port: int + :ivar address: Public IP address of the master node of the cluster. + :vartype address: str + :ivar administrator_account: Admin credentials for master node of the cluster. 
+ :vartype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'address': {'key': 'address', 'type': 'str'}, + 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + *, + ssh_port: Optional[int] = None, + address: Optional[str] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + **kwargs + ): + """ + :keyword ssh_port: Port open for ssh connections on the master node of the cluster. + :paramtype ssh_port: int + :keyword address: Public IP address of the master node of the cluster. + :paramtype address: str + :keyword administrator_account: Admin credentials for master node of the cluster. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super(HDInsightProperties, self).__init__(**kwargs) + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class IdAssetReference(AssetReferenceBase): + """Reference to an asset via its ARM resource ID. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant + filled by server. Known values are: "Id", "DataPath", "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar asset_id: Required. [Required] ARM resource ID of the asset. + :vartype asset_id: str + """ + + _validation = { + 'reference_type': {'required': True}, + 'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'asset_id': {'key': 'assetId', 'type': 'str'}, + } + + def __init__( + self, + *, + asset_id: str, + **kwargs + ): + """ + :keyword asset_id: Required. [Required] ARM resource ID of the asset. + :paramtype asset_id: str + """ + super(IdAssetReference, self).__init__(**kwargs) + self.reference_type = 'Id' # type: str + self.asset_id = asset_id + + +class IdentityForCmk(msrest.serialization.Model): + """Identity that will be used to access key vault for encryption at rest. + + :ivar user_assigned_identity: The ArmId of the user assigned identity that will be used to + access the customer managed key vault. + :vartype user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + user_assigned_identity: Optional[str] = None, + **kwargs + ): + """ + :keyword user_assigned_identity: The ArmId of the user assigned identity that will be used to + access the customer managed key vault. + :paramtype user_assigned_identity: str + """ + super(IdentityForCmk, self).__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + +class IdleShutdownSetting(msrest.serialization.Model): + """Stops compute instance after user defined period of inactivity. + + :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum + is 3 days. 
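A hedged sketch of attaching an existing HDInsight cluster with the HDInsight and HDInsightProperties models above; the IP address and ARM resource ID are placeholders, and the optional administrator_account (a VirtualMachineSshCredentials, not shown in this excerpt) is omitted.

from azure.mgmt.machinelearningservices import models

# Minimal sketch: describe the cluster, then wrap it in the HDInsight compute model.
hdinsight_properties = models.HDInsightProperties(
    ssh_port=22,          # SSH port on the cluster master node
    address="10.0.0.4",   # hypothetical public IP of the master node
)
hdinsight_compute = models.HDInsight(
    properties=hdinsight_properties,
    resource_id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.HDInsight/clusters/<cluster>",  # placeholder ARM id
    description="Attached HDInsight cluster",
    disable_local_auth=True,  # require MSI/AAD instead of local credentials
)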
+ :vartype idle_time_before_shutdown: str + """ + + _attribute_map = { + 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, + } + + def __init__( + self, + *, + idle_time_before_shutdown: Optional[str] = None, + **kwargs + ): + """ + :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, + maximum is 3 days. + :paramtype idle_time_before_shutdown: str + """ + super(IdleShutdownSetting, self).__init__(**kwargs) + self.idle_time_before_shutdown = idle_time_before_shutdown + + +class Image(msrest.serialization.Model): + """Image. + + :ivar additional_properties: Unmatched properties from the message are deserialized to this + collection. + :vartype additional_properties: dict[str, any] + :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For + AzureML images. Known values are: "docker", "azureml". Default value: "docker". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :ivar reference: Image reference URL. + :vartype reference: str + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference': {'key': 'reference', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, Any]] = None, + type: Optional[Union[str, "_models.ImageType"]] = "docker", + reference: Optional[str] = None, + **kwargs + ): + """ + :keyword additional_properties: Unmatched properties from the message are deserialized to this + collection. + :paramtype additional_properties: dict[str, any] + :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - + For AzureML images. Known values are: "docker", "azureml". Default value: "docker". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType + :keyword reference: Image reference URL. + :paramtype reference: str + """ + super(Image, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = type + self.reference = reference + + +class ImageVertical(msrest.serialization.Model): + """Abstract class for AutoML tasks that train image (computer vision) models - +such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. 
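A hedged sketch of the two small models just defined, IdleShutdownSetting and Image; all values shown are illustrative.

from azure.mgmt.machinelearningservices import models

idle_shutdown = models.IdleShutdownSetting(
    idle_time_before_shutdown="PT30M",  # ISO 8601 duration; documented range is 15 minutes to 3 days
)
image = models.Image(
    type="azureml",                                           # "docker" (default) or "azureml"
    reference="mcr.microsoft.com/azureml/curated/example:1",  # hypothetical image reference URL
)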
+ :vartype validation_data_size: float + """ + + _validation = { + 'limit_settings': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + """ + super(ImageVertical, self).__init__(**kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + + +class ImageClassificationBase(ImageVertical): + """ImageClassificationBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
+ :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + + _validation = { + 'limit_settings': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + """ + super(ImageClassificationBase, self).__init__(limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, **kwargs) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageClassification(AutoMLVertical, ImageClassificationBase): + """Image Classification. Multi-class image classification is used when an image is classified with only a single label +from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. 
+ Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + + _validation = { + 'limit_settings': {'required': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + training_data: "_models.MLTableJobInput", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. 
+ :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + super(ImageClassification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + self.task_type = 'ImageClassification' # type: str + self.primary_metric = primary_metric + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class ImageClassificationMultilabel(AutoMLVertical, ImageClassificationBase): + """Image Classification Multilabel. Multi-label image classification is used when an image could have one or more labels +from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. 
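A hedged sketch of an ImageClassification task as defined above. ImageLimitSettings is documented later in this file; MLTableJobInput is not shown in this excerpt, so the assumption that it is constructed from an MLTable asset URI (and the URI itself) is hypothetical.

import datetime

from azure.mgmt.machinelearningservices import models

# Assumption: MLTableJobInput takes the URI of a registered MLTable asset.
training_data = models.MLTableJobInput(uri="azureml://datastores/workspaceblobstore/paths/images-train/")

image_classification = models.ImageClassification(
    training_data=training_data,
    target_column_name="label",  # hypothetical label column
    limit_settings=models.ImageLimitSettings(
        max_trials=10,
        max_concurrent_trials=2,
        timeout=datetime.timedelta(hours=6),
    ),
    validation_data_size=0.2,    # hold out 20% of the training data when no validation set is given
    primary_metric="Accuracy",   # one of the documented known values
    log_verbosity="Info",
)
assert image_classification.task_type == "ImageClassification"  # discriminator filled by the model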
+ :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", "IOU". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + + _validation = { + 'limit_settings': {'required': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + training_data: "_models.MLTableJobInput", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ClassificationMultilabelPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. 
+ :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted", "IOU". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics + """ + super(ImageClassificationMultilabel, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + self.task_type = 'ImageClassificationMultilabel' # type: str + self.primary_metric = primary_metric + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class ImageObjectDetectionBase(ImageVertical): + """ImageObjectDetectionBase. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. 
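The multilabel variant accepts the same inputs; a brief hedged sketch showing the two differences that matter here, the multilabel-specific "IOU" primary metric and the task_type discriminator (the MLTable URI is again hypothetical, as above).

from azure.mgmt.machinelearningservices import models

multilabel_job = models.ImageClassificationMultilabel(
    training_data=models.MLTableJobInput(uri="azureml://datastores/workspaceblobstore/paths/images-multilabel/"),  # assumed constructor, as above
    limit_settings=models.ImageLimitSettings(max_trials=5),
    primary_metric="IOU",  # listed only for the multilabel task
)
assert multilabel_job.task_type == "ImageClassificationMultilabel"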
+ :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + + _validation = { + 'limit_settings': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + """ + super(ImageObjectDetectionBase, self).__init__(limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, **kwargs) + self.model_settings = model_settings + self.search_space = search_space + + +class ImageInstanceSegmentation(AutoMLVertical, ImageObjectDetectionBase): + """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at the pixel level, +drawing a polygon around each object in the image. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. 
+ Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "MeanAveragePrecision". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + + _validation = { + 'limit_settings': {'required': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + training_data: "_models.MLTableJobInput", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.InstanceSegmentationPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. 
+ :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. + :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "MeanAveragePrecision". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics + """ + super(ImageInstanceSegmentation, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + self.task_type = 'ImageInstanceSegmentation' # type: str + self.primary_metric = primary_metric + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class ImageLimitSettings(msrest.serialization.Model): + """Limit settings for the AutoML job. + + :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_trials: Maximum number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_trials': {'key': 'maxTrials', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + max_concurrent_trials: Optional[int] = 1, + max_trials: Optional[int] = 1, + timeout: Optional[datetime.timedelta] = "P7D", + **kwargs + ): + """ + :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. 
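+
+ As a sketch (the values are placeholders), widening the defaults of one trial, one concurrent
+ trial, and a seven-day timeout ("P7D") might look like::
+
+     import datetime
+
+     from azure.mgmt.machinelearningservices.models import ImageLimitSettings
+
+     limits = ImageLimitSettings(
+         max_trials=20,                         # total AutoML iterations
+         max_concurrent_trials=4,               # run up to four iterations in parallel
+         timeout=datetime.timedelta(hours=12),  # overall AutoML job timeout
+     )
+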
+ :paramtype max_concurrent_trials: int + :keyword max_trials: Maximum number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + """ + super(ImageLimitSettings, self).__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_trials = max_trials + self.timeout = timeout + + +class ImageModelDistributionSettings(msrest.serialization.Model): + """Distribution expressions to sweep over values of model settings. + +:code:` +Some examples are: + +ModelName = "choice('seresnext', 'resnest50')"; +LearningRate = "uniform(0.001, 0.01)"; +LayersToFreeze = "choice(0, 2)"; +` +All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) +where distribution name can be: uniform, quniform, loguniform, etc +For more details on how to compose distribution expressions please check the documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters +For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. 
+ :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. 
+ :vartype weight_decay: str + """ + + _attribute_map = { + 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'str'}, + 'beta2': {'key': 'beta2', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'str'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, + 'learning_rate': {'key': 'learningRate', 'type': 'str'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'str'}, + 'nesterov': {'key': 'nesterov', 'type': 'str'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'str'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, + 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, + } + + def __init__( + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. 
+ :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. 
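+
+ Every value here is a distribution expression string rather than a concrete value; the
+ expressions below are examples only, and in practice they are passed through the task-specific
+ subclasses (classification or object detection) rather than this base class::
+
+     learning_rate = "uniform(0.001, 0.01)"            # sample uniformly from [0.001, 0.01]
+     layers_to_freeze = "choice(0, 2)"                 # pick one of the listed values
+     model_name = "choice('seresnext', 'resnest50')"
+     optimizer = "choice('sgd', 'adamw')"
+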
+ :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + """ + super(ImageModelDistributionSettings, self).__init__(**kwargs) + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelDistributionSettingsClassification(ImageModelDistributionSettings): + """Distribution expressions to sweep over values of model settings. + +:code:` +Some examples are: + +ModelName = "choice('seresnext', 'resnest50')"; +LearningRate = "uniform(0.001, 0.01)"; +LayersToFreeze = "choice(0, 2)"; +` +For more details on how to compose distribution expressions please check the documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters +For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. 
+ :vartype training_crop_size: str + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: str + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: str + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: str + """ + + _attribute_map = { + 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'str'}, + 'beta2': {'key': 'beta2', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'str'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, + 'learning_rate': {'key': 'learningRate', 'type': 'str'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'str'}, + 'nesterov': {'key': 'nesterov', 'type': 'str'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'str'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, + 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, + 'training_crop_size': {'key': 'trainingCropSize', 'type': 'str'}, + 'validation_crop_size': {'key': 'validationCropSize', 'type': 'str'}, + 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'str'}, + 'weighted_loss': {'key': 'weightedLoss', 'type': 'str'}, + } + + def __init__( + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = 
None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + training_crop_size: Optional[str] = None, + validation_crop_size: Optional[str] = None, + validation_resize_size: Optional[str] = None, + weighted_loss: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. 
Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + :keyword training_crop_size: Image crop size that is input to the neural network for the + training dataset. Must be a positive integer. + :paramtype training_crop_size: str + :keyword validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :paramtype validation_crop_size: str + :keyword validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :paramtype validation_resize_size: str + :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. 
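+
+ For illustration, one possible classification search-space entry (the distributions are
+ placeholders, not recommendations); a list of such entries is what the image classification
+ tasks accept as their search space::
+
+     from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettingsClassification
+
+     search_space_entry = ImageModelDistributionSettingsClassification(
+         model_name="choice('seresnext', 'resnest50')",
+         learning_rate="uniform(0.001, 0.01)",
+         layers_to_freeze="choice(0, 2)",
+         training_crop_size="choice(224, 256)",
+     )
+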
+ :paramtype weighted_loss: str + """ + super(ImageModelDistributionSettingsClassification, self).__init__(ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelDistributionSettingsObjectDetection(ImageModelDistributionSettings): + """Distribution expressions to sweep over values of model settings. + +:code:` +Some examples are: + +ModelName = "choice('seresnext', 'resnest50')"; +LearningRate = "uniform(0.001, 0.01)"; +LayersToFreeze = "choice(0, 2)"; +` +For more details on how to compose distribution expressions please check the documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters +For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: str + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: str + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: str + :ivar distributed: Whether to use distributer training. + :vartype distributed: str + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: str + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: str + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: str + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: str + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. 
+ :vartype evaluation_frequency: str + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: str + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: str + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: str + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :vartype learning_rate_scheduler: str + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: str + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: str + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: str + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: str + :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :vartype optimizer: str + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: str + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: str + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: str + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: str + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: str + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: str + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: str + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: str + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: str + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :vartype box_score_threshold: str + :ivar image_size: Image size for train and validation. Must be a positive integer. 
+ Note: The training run may get into CUDA OOM if the size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype image_size: str
+ :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype max_size: str
+ :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone.
+ Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype min_size: str
+ :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'.
+ Note: training run may get into CUDA OOM if the model size is too big.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype model_size: str
+ :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%.
+ Note: training run may get into CUDA OOM if there is insufficient GPU memory.
+ Note: This setting is only supported for the 'yolov5' algorithm.
+ :vartype multi_scale: str
+ :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a
+ float in the range [0, 1].
+ :vartype nms_iou_threshold: str
+ :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not be
+ None to enable small object detection logic. A string containing two integers in mxn format.
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_grid_size: str
+ :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a float
+ in the range [0, 1).
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ :vartype tile_overlap_ratio: str
+ :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging
+ predictions from tiles and image.
+ Used in validation/inference. Must be a float in the range [0, 1].
+ Note: This setting is not supported for the 'yolov5' algorithm.
+ NMS: Non-maximum suppression.
+ :vartype tile_predictions_nms_threshold: str
+ :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be a
+ float in the range [0, 1].
+ :vartype validation_iou_threshold: str
+ :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be
+ 'none', 'coco', 'voc', or 'coco_voc'.
+ :vartype validation_metric_type: str + """ + + _attribute_map = { + 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'str'}, + 'beta2': {'key': 'beta2', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'str'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, + 'learning_rate': {'key': 'learningRate', 'type': 'str'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'str'}, + 'nesterov': {'key': 'nesterov', 'type': 'str'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'str'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, + 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, + 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'str'}, + 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'str'}, + 'image_size': {'key': 'imageSize', 'type': 'str'}, + 'max_size': {'key': 'maxSize', 'type': 'str'}, + 'min_size': {'key': 'minSize', 'type': 'str'}, + 'model_size': {'key': 'modelSize', 'type': 'str'}, + 'multi_scale': {'key': 'multiScale', 'type': 'str'}, + 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'str'}, + 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, + 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'str'}, + 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'str'}, + 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'str'}, + 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, + } + + def __init__( + self, + *, + ams_gradient: Optional[str] = None, + augmentations: Optional[str] = None, + beta1: Optional[str] = None, + beta2: Optional[str] = None, + distributed: Optional[str] = None, + early_stopping: Optional[str] = None, + early_stopping_delay: Optional[str] = None, + early_stopping_patience: Optional[str] = None, + enable_onnx_normalization: Optional[str] = None, + evaluation_frequency: Optional[str] = None, + gradient_accumulation_step: Optional[str] = None, + layers_to_freeze: Optional[str] = None, + learning_rate: Optional[str] = None, + learning_rate_scheduler: Optional[str] = None, + model_name: Optional[str] = None, + momentum: Optional[str] = None, + nesterov: Optional[str] = None, + number_of_epochs: Optional[str] = None, + number_of_workers: Optional[str] = None, + optimizer: Optional[str] = 
None, + random_seed: Optional[str] = None, + step_lr_gamma: Optional[str] = None, + step_lr_step_size: Optional[str] = None, + training_batch_size: Optional[str] = None, + validation_batch_size: Optional[str] = None, + warmup_cosine_lr_cycles: Optional[str] = None, + warmup_cosine_lr_warmup_epochs: Optional[str] = None, + weight_decay: Optional[str] = None, + box_detections_per_image: Optional[str] = None, + box_score_threshold: Optional[str] = None, + image_size: Optional[str] = None, + max_size: Optional[str] = None, + min_size: Optional[str] = None, + model_size: Optional[str] = None, + multi_scale: Optional[str] = None, + nms_iou_threshold: Optional[str] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[str] = None, + tile_predictions_nms_threshold: Optional[str] = None, + validation_iou_threshold: Optional[str] = None, + validation_metric_type: Optional[str] = None, + **kwargs + ): + """ + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: str + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: str + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: str + :keyword distributed: Whether to use distributer training. + :paramtype distributed: str + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: str + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: str + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: str + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: str + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: str + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: str + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: str + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: str + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. + :paramtype learning_rate_scheduler: str + :keyword model_name: Name of the model to use for training. 
+ For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: str + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: str + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: str + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: str + :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. + :paramtype optimizer: str + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: str + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: str + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: str + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: str + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: str + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: str + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: str + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: str + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: str + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range[0, 1]. + :paramtype box_score_threshold: str + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype image_size: str + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype max_size: str + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype min_size: str + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: training run may get into CUDA OOM if the model size is too big. + Note: This settings is only supported for the 'yolov5' algorithm. 
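+
+ For illustration, one possible object-detection search-space entry; the model names and ranges
+ are placeholders ('fasterrcnn_resnet50_fpn' is an assumed name, not taken from this file)::
+
+     from azure.mgmt.machinelearningservices.models import ImageModelDistributionSettingsObjectDetection
+
+     search_space_entry = ImageModelDistributionSettingsObjectDetection(
+         model_name="choice('yolov5', 'fasterrcnn_resnet50_fpn')",
+         model_size="choice('small', 'medium')",      # only honoured by 'yolov5'
+         learning_rate="uniform(0.0001, 0.01)",
+         min_size="choice(600, 800)",                 # ignored by 'yolov5'
+     )
+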
+ :paramtype model_size: str + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: training run may get into CUDA OOM if no sufficient GPU memory. + Note: This settings is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: str + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + float in the range [0, 1]. + :paramtype nms_iou_threshold: str + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + float in the range [0, 1). + Note: This settings is not supported for the 'yolov5' algorithm. + :paramtype tile_overlap_ratio: str + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/ inference. Must be float in the range [0, 1]. + Note: This settings is not supported for the 'yolov5' algorithm. + NMS: Non-maximum suppression. + :paramtype tile_predictions_nms_threshold: str + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be float in the range [0, 1]. + :paramtype validation_iou_threshold: str + :keyword validation_metric_type: Metric computation method to use for validation metrics. Must + be 'none', 'coco', 'voc', or 'coco_voc'. + :paramtype validation_metric_type: str + """ + super(ImageModelDistributionSettingsObjectDetection, self).__init__(ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageModelSettings(msrest.serialization.Model): + """Settings used for training the model. 
+For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. 
Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + """ + + _attribute_map = { + 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, + 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'float'}, + 'beta2': {'key': 'beta2', 'type': 'float'}, + 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, + 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, + 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'bool'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, + 'learning_rate': {'key': 'learningRate', 'type': 'float'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'float'}, + 'nesterov': {'key': 'nesterov', 'type': 'bool'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'int'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, + 'training_batch_size': {'key': 'trainingBatchSize', 
'type': 'int'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, + } + + def __init__( + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. 
Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + """ + super(ImageModelSettings, self).__init__(**kwargs) + self.advanced_settings = advanced_settings + self.ams_gradient = ams_gradient + self.augmentations = augmentations + self.beta1 = beta1 + self.beta2 = beta2 + self.checkpoint_frequency = checkpoint_frequency + self.checkpoint_model = checkpoint_model + self.checkpoint_run_id = checkpoint_run_id + self.distributed = distributed + self.early_stopping = early_stopping + self.early_stopping_delay = early_stopping_delay + self.early_stopping_patience = early_stopping_patience + self.enable_onnx_normalization = enable_onnx_normalization + self.evaluation_frequency = evaluation_frequency + self.gradient_accumulation_step = gradient_accumulation_step + self.layers_to_freeze = layers_to_freeze + self.learning_rate = learning_rate + self.learning_rate_scheduler = learning_rate_scheduler + self.model_name = model_name + self.momentum = momentum + self.nesterov = nesterov + self.number_of_epochs = number_of_epochs + self.number_of_workers = number_of_workers + self.optimizer = optimizer + self.random_seed = random_seed + self.step_lr_gamma = step_lr_gamma + self.step_lr_step_size = step_lr_step_size + self.training_batch_size = training_batch_size + self.validation_batch_size = validation_batch_size + self.warmup_cosine_lr_cycles = warmup_cosine_lr_cycles + self.warmup_cosine_lr_warmup_epochs = warmup_cosine_lr_warmup_epochs + self.weight_decay = weight_decay + + +class ImageModelSettingsClassification(ImageModelSettings): + """Settings used for training the model. +For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. + :vartype weight_decay: float + :ivar training_crop_size: Image crop size that is input to the neural network for the training + dataset. Must be a positive integer. 
+ :vartype training_crop_size: int + :ivar validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :vartype validation_crop_size: int + :ivar validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :vartype validation_resize_size: int + :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :vartype weighted_loss: int + """ + + _attribute_map = { + 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, + 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'float'}, + 'beta2': {'key': 'beta2', 'type': 'float'}, + 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, + 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, + 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'bool'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, + 'learning_rate': {'key': 'learningRate', 'type': 'float'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'float'}, + 'nesterov': {'key': 'nesterov', 'type': 'bool'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'int'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, + 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, + 'training_crop_size': {'key': 'trainingCropSize', 'type': 'int'}, + 'validation_crop_size': {'key': 'validationCropSize', 'type': 'int'}, + 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'int'}, + 'weighted_loss': {'key': 'weightedLoss', 'type': 'int'}, + } + + def __init__( + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, + early_stopping_delay: Optional[int] = None, + early_stopping_patience: 
Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + training_crop_size: Optional[int] = None, + validation_crop_size: Optional[int] = None, + validation_resize_size: Optional[int] = None, + weighted_loss: Optional[int] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. + :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. 
+ :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. + :paramtype weight_decay: float + :keyword training_crop_size: Image crop size that is input to the neural network for the + training dataset. Must be a positive integer. + :paramtype training_crop_size: int + :keyword validation_crop_size: Image crop size that is input to the neural network for the + validation dataset. Must be a positive integer. + :paramtype validation_crop_size: int + :keyword validation_resize_size: Image size to which to resize before cropping for validation + dataset. Must be a positive integer. + :paramtype validation_resize_size: int + :keyword weighted_loss: Weighted loss. 
The accepted values are 0 for no weighted loss. + 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be + 0 or 1 or 2. + :paramtype weighted_loss: int + """ + super(ImageModelSettingsClassification, self).__init__(advanced_settings=advanced_settings, ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, checkpoint_frequency=checkpoint_frequency, checkpoint_model=checkpoint_model, checkpoint_run_id=checkpoint_run_id, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + self.training_crop_size = training_crop_size + self.validation_crop_size = validation_crop_size + self.validation_resize_size = validation_resize_size + self.weighted_loss = weighted_loss + + +class ImageModelSettingsObjectDetection(ImageModelSettings): + """Settings used for training the model. +For more information on the available settings please visit the official documentation: +https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + + :ivar advanced_settings: Settings for advanced scenarios. + :vartype advanced_settings: str + :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :vartype ams_gradient: bool + :ivar augmentations: Settings for using Augmentations. + :vartype augmentations: str + :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta1: float + :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range + [0, 1]. + :vartype beta2: float + :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. + :vartype checkpoint_frequency: int + :ivar checkpoint_model: The pretrained checkpoint model for incremental training. + :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :vartype checkpoint_run_id: str + :ivar distributed: Whether to use distributed training. + :vartype distributed: bool + :ivar early_stopping: Enable early stopping logic during training. + :vartype early_stopping: bool + :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before + primary metric improvement + is tracked for early stopping. Must be a positive integer. + :vartype early_stopping_delay: int + :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. 
+ :vartype early_stopping_patience: int + :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. + :vartype enable_onnx_normalization: bool + :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must + be a positive integer. + :vartype evaluation_frequency: int + :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :vartype gradient_accumulation_step: int + :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype layers_to_freeze: int + :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :vartype learning_rate: float + :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :vartype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :ivar model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :vartype model_name: str + :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. + :vartype momentum: float + :ivar nesterov: Enable nesterov when optimizer is 'sgd'. + :vartype nesterov: bool + :ivar number_of_epochs: Number of training epochs. Must be a positive integer. + :vartype number_of_epochs: int + :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. + :vartype number_of_workers: int + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :ivar random_seed: Random seed to be used when using deterministic training. + :vartype random_seed: int + :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in + the range [0, 1]. + :vartype step_lr_gamma: float + :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a + positive integer. + :vartype step_lr_step_size: int + :ivar training_batch_size: Training batch size. Must be a positive integer. + :vartype training_batch_size: int + :ivar validation_batch_size: Validation batch size. Must be a positive integer. + :vartype validation_batch_size: int + :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :vartype warmup_cosine_lr_cycles: float + :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :vartype warmup_cosine_lr_warmup_epochs: int + :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be + a float in the range[0, 1]. 
+ :vartype weight_decay: float + :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must + be a positive integer. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype box_detections_per_image: int + :ivar box_score_threshold: During inference, only return proposals with a classification score + greater than + BoxScoreThreshold. Must be a float in the range [0, 1]. + :vartype box_score_threshold: float + :ivar image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. + :vartype image_size: int + :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype max_size: int + :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype min_size: int + :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: The training run may get into CUDA OOM if the model size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", "ExtraLarge". + :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: The training run may get into CUDA OOM if there is not sufficient GPU memory. + Note: This setting is only supported for the 'yolov5' algorithm. + :vartype multi_scale: bool + :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a + float in the range [0, 1]. + :vartype nms_iou_threshold: float + :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not + be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_grid_size: str + :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a float + in the range [0, 1). + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_overlap_ratio: float + :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/inference. Must be a float in the range [0, 1]. + Note: This setting is not supported for the 'yolov5' algorithm. + :vartype tile_predictions_nms_threshold: float + :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be + a float in the range [0, 1]. + :vartype validation_iou_threshold: float + :ivar validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", "CocoVoc".
+ :vartype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + + _attribute_map = { + 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, + 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, + 'augmentations': {'key': 'augmentations', 'type': 'str'}, + 'beta1': {'key': 'beta1', 'type': 'float'}, + 'beta2': {'key': 'beta2', 'type': 'float'}, + 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, + 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, + 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, + 'distributed': {'key': 'distributed', 'type': 'bool'}, + 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, + 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, + 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, + 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, + 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, + 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, + 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, + 'learning_rate': {'key': 'learningRate', 'type': 'float'}, + 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, + 'model_name': {'key': 'modelName', 'type': 'str'}, + 'momentum': {'key': 'momentum', 'type': 'float'}, + 'nesterov': {'key': 'nesterov', 'type': 'bool'}, + 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, + 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, + 'optimizer': {'key': 'optimizer', 'type': 'str'}, + 'random_seed': {'key': 'randomSeed', 'type': 'int'}, + 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, + 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, + 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, + 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, + 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, + 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, + 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, + 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'int'}, + 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'float'}, + 'image_size': {'key': 'imageSize', 'type': 'int'}, + 'max_size': {'key': 'maxSize', 'type': 'int'}, + 'min_size': {'key': 'minSize', 'type': 'int'}, + 'model_size': {'key': 'modelSize', 'type': 'str'}, + 'multi_scale': {'key': 'multiScale', 'type': 'bool'}, + 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'float'}, + 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, + 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'float'}, + 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'float'}, + 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'float'}, + 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, + } + + def __init__( + self, + *, + advanced_settings: Optional[str] = None, + ams_gradient: Optional[bool] = None, + augmentations: Optional[str] = None, + beta1: Optional[float] = None, + beta2: Optional[float] = None, + checkpoint_frequency: Optional[int] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, + checkpoint_run_id: Optional[str] = None, + distributed: Optional[bool] = None, + early_stopping: Optional[bool] = None, 
+ early_stopping_delay: Optional[int] = None, + early_stopping_patience: Optional[int] = None, + enable_onnx_normalization: Optional[bool] = None, + evaluation_frequency: Optional[int] = None, + gradient_accumulation_step: Optional[int] = None, + layers_to_freeze: Optional[int] = None, + learning_rate: Optional[float] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, + model_name: Optional[str] = None, + momentum: Optional[float] = None, + nesterov: Optional[bool] = None, + number_of_epochs: Optional[int] = None, + number_of_workers: Optional[int] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, + random_seed: Optional[int] = None, + step_lr_gamma: Optional[float] = None, + step_lr_step_size: Optional[int] = None, + training_batch_size: Optional[int] = None, + validation_batch_size: Optional[int] = None, + warmup_cosine_lr_cycles: Optional[float] = None, + warmup_cosine_lr_warmup_epochs: Optional[int] = None, + weight_decay: Optional[float] = None, + box_detections_per_image: Optional[int] = None, + box_score_threshold: Optional[float] = None, + image_size: Optional[int] = None, + max_size: Optional[int] = None, + min_size: Optional[int] = None, + model_size: Optional[Union[str, "_models.ModelSize"]] = None, + multi_scale: Optional[bool] = None, + nms_iou_threshold: Optional[float] = None, + tile_grid_size: Optional[str] = None, + tile_overlap_ratio: Optional[float] = None, + tile_predictions_nms_threshold: Optional[float] = None, + validation_iou_threshold: Optional[float] = None, + validation_metric_type: Optional[Union[str, "_models.ValidationMetricType"]] = None, + **kwargs + ): + """ + :keyword advanced_settings: Settings for advanced scenarios. + :paramtype advanced_settings: str + :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. + :paramtype ams_gradient: bool + :keyword augmentations: Settings for using Augmentations. + :paramtype augmentations: str + :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta1: float + :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the + range [0, 1]. + :paramtype beta2: float + :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive + integer. + :paramtype checkpoint_frequency: int + :keyword checkpoint_model: The pretrained checkpoint model for incremental training. + :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput + :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for + incremental training. + :paramtype checkpoint_run_id: str + :keyword distributed: Whether to use distributed training. + :paramtype distributed: bool + :keyword early_stopping: Enable early stopping logic during training. + :paramtype early_stopping: bool + :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait + before primary metric improvement + is tracked for early stopping. Must be a positive integer. + :paramtype early_stopping_delay: int + :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no + primary metric improvement before + the run is stopped. Must be a positive integer. + :paramtype early_stopping_patience: int + :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. 
+ :paramtype enable_onnx_normalization: bool + :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. + Must be a positive integer. + :paramtype evaluation_frequency: int + :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of + "GradAccumulationStep" steps without + updating the model weights while accumulating the gradients of those steps, and then using + the accumulated gradients to compute the weight updates. Must be a positive integer. + :paramtype gradient_accumulation_step: int + :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive + integer. + For instance, passing 2 as value for 'seresnext' means + freezing layer0 and layer1. For a full list of models supported and details on layer freeze, + please + see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype layers_to_freeze: int + :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. + :paramtype learning_rate: float + :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or + 'step'. Known values are: "None", "WarmupCosine", "Step". + :paramtype learning_rate_scheduler: str or + ~azure.mgmt.machinelearningservices.models.LearningRateScheduler + :keyword model_name: Name of the model to use for training. + For more information on the available models please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :paramtype model_name: str + :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, + 1]. + :paramtype momentum: float + :keyword nesterov: Enable nesterov when optimizer is 'sgd'. + :paramtype nesterov: bool + :keyword number_of_epochs: Number of training epochs. Must be a positive integer. + :paramtype number_of_epochs: int + :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. + :paramtype number_of_workers: int + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", "Adamw". + :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer + :keyword random_seed: Random seed to be used when using deterministic training. + :paramtype random_seed: int + :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float + in the range [0, 1]. + :paramtype step_lr_gamma: float + :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be + a positive integer. + :paramtype step_lr_step_size: int + :keyword training_batch_size: Training batch size. Must be a positive integer. + :paramtype training_batch_size: int + :keyword validation_batch_size: Validation batch size. Must be a positive integer. + :paramtype validation_batch_size: int + :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is + 'warmup_cosine'. Must be a float in the range [0, 1]. + :paramtype warmup_cosine_lr_cycles: float + :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is + 'warmup_cosine'. Must be a positive integer. + :paramtype warmup_cosine_lr_warmup_epochs: int + :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must + be a float in the range[0, 1]. 
+ :paramtype weight_decay: float + :keyword box_detections_per_image: Maximum number of detections per image, for all classes. + Must be a positive integer. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype box_detections_per_image: int + :keyword box_score_threshold: During inference, only return proposals with a classification + score greater than + BoxScoreThreshold. Must be a float in the range [0, 1]. + :paramtype box_score_threshold: float + :keyword image_size: Image size for train and validation. Must be a positive integer. + Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. + :paramtype image_size: int + :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype max_size: int + :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. + Must be a positive integer. Note: The training run may get into CUDA OOM if the size is too big. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype min_size: int + :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. + Note: The training run may get into CUDA OOM if the model size is too big. + Note: This setting is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", "ExtraLarge". + :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize + :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. + Note: The training run may get into CUDA OOM if there is not sufficient GPU memory. + Note: This setting is only supported for the 'yolov5' algorithm. + :paramtype multi_scale: bool + :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be + a float in the range [0, 1]. + :paramtype nms_iou_threshold: float + :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must + not be + None to enable small object detection logic. A string containing two integers in mxn format. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_grid_size: str + :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be + a float in the range [0, 1). + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_overlap_ratio: float + :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging + predictions from tiles and image. + Used in validation/inference. Must be a float in the range [0, 1]. + Note: This setting is not supported for the 'yolov5' algorithm. + :paramtype tile_predictions_nms_threshold: float + :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must + be a float in the range [0, 1]. + :paramtype validation_iou_threshold: float + :keyword validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", "CocoVoc".
+ :paramtype validation_metric_type: str or + ~azure.mgmt.machinelearningservices.models.ValidationMetricType + """ + super(ImageModelSettingsObjectDetection, self).__init__(advanced_settings=advanced_settings, ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, checkpoint_frequency=checkpoint_frequency, checkpoint_model=checkpoint_model, checkpoint_run_id=checkpoint_run_id, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + self.box_detections_per_image = box_detections_per_image + self.box_score_threshold = box_score_threshold + self.image_size = image_size + self.max_size = max_size + self.min_size = min_size + self.model_size = model_size + self.multi_scale = multi_scale + self.nms_iou_threshold = nms_iou_threshold + self.tile_grid_size = tile_grid_size + self.tile_overlap_ratio = tile_overlap_ratio + self.tile_predictions_nms_threshold = tile_predictions_nms_threshold + self.validation_iou_threshold = validation_iou_threshold + self.validation_metric_type = validation_metric_type + + +class ImageObjectDetection(AutoMLVertical, ImageObjectDetectionBase): + """Image Object Detection. Object detection is used to identify objects in an image and locate each object with a +bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. + + All required parameters must be populated in order to send to Azure. + + :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar model_settings: Settings used for training the model. + :vartype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :ivar search_space: Search space for sampling different combinations of models and their + hyperparameters. + :vartype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". 
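Editorial note: ImageModelSettingsObjectDetection above is a plain msrest serialization model, so the constraints in its docstring are only enforced service-side. The sketch below shows how it might be instantiated; the values and the flat import from azure.mgmt.machinelearningservices.models are assumptions based on the docstrings in this diff, not part of the generated sources.

# Hypothetical usage sketch for the object-detection training settings documented above.
from azure.mgmt.machinelearningservices.models import ImageModelSettingsObjectDetection

od_settings = ImageModelSettingsObjectDetection(
    model_name="yolov5",               # assumed model name; several settings below are 'yolov5'-only
    image_size=640,                    # only honoured by 'yolov5'; too large a value can cause CUDA OOM
    multi_scale=True,                  # only supported for 'yolov5'
    optimizer="Sgd",                   # accepts str or StochasticOptimizer; known values include "Sgd"
    learning_rate=0.01,                # float in [0, 1]
    learning_rate_scheduler="WarmupCosine",
    warmup_cosine_lr_warmup_epochs=2,  # positive integer
    number_of_epochs=30,
    early_stopping=True,
    nms_iou_threshold=0.5,             # float in [0, 1]
)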
+ :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "MeanAveragePrecision". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + + _validation = { + 'limit_settings': {'required': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + } + + _attribute_map = { + 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, + 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, + 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__( + self, + *, + limit_settings: "_models.ImageLimitSettings", + training_data: "_models.MLTableJobInput", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ObjectDetectionPrimaryMetrics"]] = None, + **kwargs + ): + """ + :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings + :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. + :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword model_settings: Settings used for training the model. 
+ :paramtype model_settings: + ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection + :keyword search_space: Search space for sampling different combinations of models and their + hyperparameters. + :paramtype search_space: + list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "MeanAveragePrecision". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics + """ + super(ImageObjectDetection, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + self.limit_settings = limit_settings + self.sweep_settings = sweep_settings + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.model_settings = model_settings + self.search_space = search_space + self.task_type = 'ImageObjectDetection' # type: str + self.primary_metric = primary_metric + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class ImageSweepLimitSettings(msrest.serialization.Model): + """Limit settings for model sweeping and hyperparameter sweeping. + + :ivar max_concurrent_trials: Maximum number of concurrent iterations for the underlying Sweep + job. + :vartype max_concurrent_trials: int + :ivar max_trials: Maximum number of iterations for the underlying Sweep job. + :vartype max_trials: int + """ + + _attribute_map = { + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_trials': {'key': 'maxTrials', 'type': 'int'}, + } + + def __init__( + self, + *, + max_concurrent_trials: Optional[int] = None, + max_trials: Optional[int] = None, + **kwargs + ): + """ + :keyword max_concurrent_trials: Maximum number of concurrent iterations for the underlying + Sweep job. + :paramtype max_concurrent_trials: int + :keyword max_trials: Maximum number of iterations for the underlying Sweep job. + :paramtype max_trials: int + """ + super(ImageSweepLimitSettings, self).__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_trials = max_trials + + +class ImageSweepSettings(msrest.serialization.Model): + """Model sweeping and hyperparameter sweeping related settings. + + All required parameters must be populated in order to send to Azure. + + :ivar early_termination: Type of early termination policy. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar limits: Required. [Required] Limit settings for model sweeping and hyperparameter + sweeping. 
+ :vartype limits: ~azure.mgmt.machinelearningservices.models.ImageSweepLimitSettings + :ivar sampling_algorithm: Required. [Required] Type of the hyperparameter sampling algorithms. + Known values are: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + + _validation = { + 'limits': {'required': True}, + 'sampling_algorithm': {'required': True}, + } + + _attribute_map = { + 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, + 'limits': {'key': 'limits', 'type': 'ImageSweepLimitSettings'}, + 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + } + + def __init__( + self, + *, + limits: "_models.ImageSweepLimitSettings", + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs + ): + """ + :keyword early_termination: Type of early termination policy. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword limits: Required. [Required] Limit settings for model sweeping and hyperparameter + sweeping. + :paramtype limits: ~azure.mgmt.machinelearningservices.models.ImageSweepLimitSettings + :keyword sampling_algorithm: Required. [Required] Type of the hyperparameter sampling + algorithms. Known values are: "Grid", "Random", "Bayesian". + :paramtype sampling_algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + """ + super(ImageSweepSettings, self).__init__(**kwargs) + self.early_termination = early_termination + self.limits = limits + self.sampling_algorithm = sampling_algorithm + + +class InferenceContainerProperties(msrest.serialization.Model): + """InferenceContainerProperties. + + :ivar liveness_route: The route to check the liveness of the inference server container. + :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar readiness_route: The route to check the readiness of the inference server container. + :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :ivar scoring_route: The port to send the scoring requests to, within the inference server + container. + :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + + _attribute_map = { + 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, + 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, + 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + } + + def __init__( + self, + *, + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs + ): + """ + :keyword liveness_route: The route to check the liveness of the inference server container. + :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword readiness_route: The route to check the readiness of the inference server container. + :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :keyword scoring_route: The port to send the scoring requests to, within the inference server + container. 
+ :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + super(InferenceContainerProperties, self).__init__(**kwargs) + self.liveness_route = liveness_route + self.readiness_route = readiness_route + self.scoring_route = scoring_route + + +class InstanceTypeSchema(msrest.serialization.Model): + """Instance type schema. + + :ivar node_selector: Node Selector. + :vartype node_selector: dict[str, str] + :ivar resources: Resource requests/limits for this instance type. + :vartype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + + _attribute_map = { + 'node_selector': {'key': 'nodeSelector', 'type': '{str}'}, + 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'}, + } + + def __init__( + self, + *, + node_selector: Optional[Dict[str, str]] = None, + resources: Optional["_models.InstanceTypeSchemaResources"] = None, + **kwargs + ): + """ + :keyword node_selector: Node Selector. + :paramtype node_selector: dict[str, str] + :keyword resources: Resource requests/limits for this instance type. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources + """ + super(InstanceTypeSchema, self).__init__(**kwargs) + self.node_selector = node_selector + self.resources = resources + + +class InstanceTypeSchemaResources(msrest.serialization.Model): + """Resource requests/limits for this instance type. + + :ivar requests: Resource requests for this instance type. + :vartype requests: dict[str, str] + :ivar limits: Resource limits for this instance type. + :vartype limits: dict[str, str] + """ + + _attribute_map = { + 'requests': {'key': 'requests', 'type': '{str}'}, + 'limits': {'key': 'limits', 'type': '{str}'}, + } + + def __init__( + self, + *, + requests: Optional[Dict[str, str]] = None, + limits: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword requests: Resource requests for this instance type. + :paramtype requests: dict[str, str] + :keyword limits: Resource limits for this instance type. + :paramtype limits: dict[str, str] + """ + super(InstanceTypeSchemaResources, self).__init__(**kwargs) + self.requests = requests + self.limits = limits + + +class JobBase(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'JobBaseProperties'}, + } + + def __init__( + self, + *, + properties: "_models.JobBaseProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super(JobBase, self).__init__(**kwargs) + self.properties = properties + + +class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of JobBase entities. + + :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional + pages. + :vartype next_link: str + :ivar value: An array of objects of type JobBase. + :vartype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[JobBase]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.JobBase"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of JobBase objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type JobBase. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] + """ + super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class JobResourceConfiguration(ResourceConfiguration): + """JobResourceConfiguration. + + :ivar instance_count: Optional number of instances or nodes used by the compute target. + :vartype instance_count: int + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar properties: Additional properties bag. + :vartype properties: dict[str, any] + :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any + parameters that have already been set by the system, or in this section. This parameter is only + supported for Azure ML compute types. + :vartype docker_args: str + :ivar shm_size: Size of the docker container's shared memory block. This should be in the + format of (number)(unit) where number as to be greater than 0 and the unit can be one of + b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). 
+ :vartype shm_size: str + """ + + _validation = { + 'shm_size': {'pattern': r'\d+[bBkKmMgG]'}, + } + + _attribute_map = { + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{object}'}, + 'docker_args': {'key': 'dockerArgs', 'type': 'str'}, + 'shm_size': {'key': 'shmSize', 'type': 'str'}, + } + + def __init__( + self, + *, + instance_count: Optional[int] = 1, + instance_type: Optional[str] = None, + properties: Optional[Dict[str, Any]] = None, + docker_args: Optional[str] = None, + shm_size: Optional[str] = "2g", + **kwargs + ): + """ + :keyword instance_count: Optional number of instances or nodes used by the compute target. + :paramtype instance_count: int + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword properties: Additional properties bag. + :paramtype properties: dict[str, any] + :keyword docker_args: Extra arguments to pass to the Docker run command. This would override + any parameters that have already been set by the system, or in this section. This parameter is + only supported for Azure ML compute types. + :paramtype docker_args: str + :keyword shm_size: Size of the docker container's shared memory block. This should be in the + format of (number)(unit) where number as to be greater than 0 and the unit can be one of + b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). + :paramtype shm_size: str + """ + super(JobResourceConfiguration, self).__init__(instance_count=instance_count, instance_type=instance_type, properties=properties, **kwargs) + self.docker_args = docker_args + self.shm_size = shm_size + + +class JobScheduleAction(ScheduleActionBase): + """JobScheduleAction. + + All required parameters must be populated in order to send to Azure. + + :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant + filled by server. Known values are: "CreateJob", "InvokeBatchEndpoint". + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType + :ivar job_definition: Required. [Required] Defines Schedule action definition details. + :vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + + _validation = { + 'action_type': {'required': True}, + 'job_definition': {'required': True}, + } + + _attribute_map = { + 'action_type': {'key': 'actionType', 'type': 'str'}, + 'job_definition': {'key': 'jobDefinition', 'type': 'JobBaseProperties'}, + } + + def __init__( + self, + *, + job_definition: "_models.JobBaseProperties", + **kwargs + ): + """ + :keyword job_definition: Required. [Required] Defines Schedule action definition details. + :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties + """ + super(JobScheduleAction, self).__init__(**kwargs) + self.action_type = 'CreateJob' # type: str + self.job_definition = job_definition + + +class JobService(msrest.serialization.Model): + """Job endpoint definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar endpoint: Url for endpoint. + :vartype endpoint: str + :ivar error_message: Any error in the service. + :vartype error_message: str + :ivar job_service_type: Endpoint type. + :vartype job_service_type: str + :ivar port: Port for endpoint. + :vartype port: int + :ivar properties: Additional properties to set on the endpoint. 
+ :vartype properties: dict[str, str] + :ivar status: Status of endpoint. + :vartype status: str + """ + + _validation = { + 'error_message': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'job_service_type': {'key': 'jobServiceType', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + *, + endpoint: Optional[str] = None, + job_service_type: Optional[str] = None, + port: Optional[int] = None, + properties: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword endpoint: Url for endpoint. + :paramtype endpoint: str + :keyword job_service_type: Endpoint type. + :paramtype job_service_type: str + :keyword port: Port for endpoint. + :paramtype port: int + :keyword properties: Additional properties to set on the endpoint. + :paramtype properties: dict[str, str] + """ + super(JobService, self).__init__(**kwargs) + self.endpoint = endpoint + self.error_message = None + self.job_service_type = job_service_type + self.port = port + self.properties = properties + self.status = None + + +class KerberosCredentials(msrest.serialization.Model): + """KerberosCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: Required. [Required] Kerberos Username. + :vartype kerberos_principal: str + :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server + has the authority to authenticate a user, host or service. + :vartype kerberos_realm: str + """ + + _validation = { + 'kerberos_kdc_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_principal': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_realm': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, + 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, + 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + **kwargs + ): + """ + :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: Required. [Required] Kerberos Username. + :paramtype kerberos_principal: str + :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication + server has the authority to authenticate a user, host or service. + :paramtype kerberos_realm: str + """ + super(KerberosCredentials, self).__init__(**kwargs) + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + + +class KerberosKeytabCredentials(DatastoreCredentials, KerberosCredentials): + """KerberosKeytabCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: Required. [Required] Kerberos Username. + :vartype kerberos_principal: str + :ivar kerberos_realm: Required. 
[Required] Domain over which a Kerberos authentication server + has the authority to authenticate a user, host or service. + :vartype kerberos_realm: str + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: Required. [Required] Keytab secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + + _validation = { + 'kerberos_kdc_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_principal': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_realm': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'credentials_type': {'required': True}, + 'secrets': {'required': True}, + } + + _attribute_map = { + 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, + 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, + 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'KerberosKeytabSecrets'}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosKeytabSecrets", + **kwargs + ): + """ + :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: Required. [Required] Kerberos Username. + :paramtype kerberos_principal: str + :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication + server has the authority to authenticate a user, host or service. + :paramtype kerberos_realm: str + :keyword secrets: Required. [Required] Keytab secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets + """ + super(KerberosKeytabCredentials, self).__init__(kerberos_kdc_address=kerberos_kdc_address, kerberos_principal=kerberos_principal, kerberos_realm=kerberos_realm, **kwargs) + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + self.credentials_type = 'KerberosKeytab' # type: str + self.secrets = secrets + + +class KerberosKeytabSecrets(DatastoreSecrets): + """KerberosKeytabSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_keytab: Kerberos keytab secret. + :vartype kerberos_keytab: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'kerberos_keytab': {'key': 'kerberosKeytab', 'type': 'str'}, + } + + def __init__( + self, + *, + kerberos_keytab: Optional[str] = None, + **kwargs + ): + """ + :keyword kerberos_keytab: Kerberos keytab secret. 
+ :paramtype kerberos_keytab: str + """ + super(KerberosKeytabSecrets, self).__init__(**kwargs) + self.secrets_type = 'KerberosKeytab' # type: str + self.kerberos_keytab = kerberos_keytab + + +class KerberosPasswordCredentials(DatastoreCredentials, KerberosCredentials): + """KerberosPasswordCredentials. + + All required parameters must be populated in order to send to Azure. + + :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :vartype kerberos_kdc_address: str + :ivar kerberos_principal: Required. [Required] Kerberos Username. + :vartype kerberos_principal: str + :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server + has the authority to authenticate a user, host or service. + :vartype kerberos_realm: str + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: Required. [Required] Kerberos password secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + + _validation = { + 'kerberos_kdc_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_principal': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'kerberos_realm': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'credentials_type': {'required': True}, + 'secrets': {'required': True}, + } + + _attribute_map = { + 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, + 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, + 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'KerberosPasswordSecrets'}, + } + + def __init__( + self, + *, + kerberos_kdc_address: str, + kerberos_principal: str, + kerberos_realm: str, + secrets: "_models.KerberosPasswordSecrets", + **kwargs + ): + """ + :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :paramtype kerberos_kdc_address: str + :keyword kerberos_principal: Required. [Required] Kerberos Username. + :paramtype kerberos_principal: str + :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication + server has the authority to authenticate a user, host or service. + :paramtype kerberos_realm: str + :keyword secrets: Required. [Required] Kerberos password secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets + """ + super(KerberosPasswordCredentials, self).__init__(kerberos_kdc_address=kerberos_kdc_address, kerberos_principal=kerberos_principal, kerberos_realm=kerberos_realm, **kwargs) + self.kerberos_kdc_address = kerberos_kdc_address + self.kerberos_principal = kerberos_principal + self.kerberos_realm = kerberos_realm + self.credentials_type = 'KerberosPassword' # type: str + self.secrets = secrets + + +class KerberosPasswordSecrets(DatastoreSecrets): + """KerberosPasswordSecrets. + + All required parameters must be populated in order to send to Azure. + + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". 
+ :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar kerberos_password: Kerberos password secret. + :vartype kerberos_password: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'kerberos_password': {'key': 'kerberosPassword', 'type': 'str'}, + } + + def __init__( + self, + *, + kerberos_password: Optional[str] = None, + **kwargs + ): + """ + :keyword kerberos_password: Kerberos password secret. + :paramtype kerberos_password: str + """ + super(KerberosPasswordSecrets, self).__init__(**kwargs) + self.secrets_type = 'KerberosPassword' # type: str + self.kerberos_password = kerberos_password + + +class KubernetesSchema(msrest.serialization.Model): + """Kubernetes Compute Schema. + + :ivar properties: Properties of Kubernetes. + :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.KubernetesProperties"] = None, + **kwargs + ): + """ + :keyword properties: Properties of Kubernetes. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + """ + super(KubernetesSchema, self).__init__(**kwargs) + self.properties = properties + + +class Kubernetes(Compute, KubernetesSchema): + """A Machine Learning compute based on Kubernetes Compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar properties: Properties of Kubernetes. + :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + properties: Optional["_models.KubernetesProperties"] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + """ + :keyword properties: Properties of Kubernetes. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(Kubernetes, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + self.properties = properties + self.compute_type = 'Kubernetes' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): + """OnlineDeploymentProperties. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: KubernetesOnlineDeployment, ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. 
+ :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant + filled by server. Known values are: "Managed", "Kubernetes", "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. 
+ :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'str'}, + 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + } + + _subtype_map = { + 'endpoint_compute_type': {'Kubernetes': 'KubernetesOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'} + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: Optional[bool] = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. 
+ :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + super(OnlineDeploymentProperties, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, **kwargs) + self.app_insights_enabled = app_insights_enabled + self.egress_public_network_access = egress_public_network_access + self.endpoint_compute_type = 'OnlineDeploymentProperties' # type: str + self.instance_type = instance_type + self.liveness_probe = liveness_probe + self.model = model + self.model_mount_path = model_mount_path + self.provisioning_state = None + self.readiness_probe = readiness_probe + self.request_settings = request_settings + self.scale_settings = scale_settings + + +class KubernetesOnlineDeployment(OnlineDeploymentProperties): + """Properties specific to a KubernetesOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". + :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant + filled by server. Known values are: "Managed", "Kubernetes", "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. 
+ :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :ivar container_resource_requirements: The resource requirements for the container (cpu and + memory). + :vartype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'str'}, + 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: Optional[bool] = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + 
model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + container_resource_requirements: Optional["_models.ContainerResourceRequirements"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :keyword container_resource_requirements: The resource requirements for the container (cpu and + memory). 
+ :paramtype container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + super(KubernetesOnlineDeployment, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, app_insights_enabled=app_insights_enabled, egress_public_network_access=egress_public_network_access, instance_type=instance_type, liveness_probe=liveness_probe, model=model, model_mount_path=model_mount_path, readiness_probe=readiness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.endpoint_compute_type = 'Kubernetes' # type: str + self.container_resource_requirements = container_resource_requirements + + +class KubernetesProperties(msrest.serialization.Model): + """Kubernetes properties. + + :ivar relay_connection_string: Relay connection string. + :vartype relay_connection_string: str + :ivar service_bus_connection_string: ServiceBus connection string. + :vartype service_bus_connection_string: str + :ivar extension_principal_id: Extension principal-id. + :vartype extension_principal_id: str + :ivar extension_instance_release_train: Extension instance release train. + :vartype extension_instance_release_train: str + :ivar vc_name: VC name. + :vartype vc_name: str + :ivar namespace: Compute namespace. + :vartype namespace: str + :ivar default_instance_type: Default instance type. + :vartype default_instance_type: str + :ivar instance_types: Instance Type Schema. + :vartype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + + _attribute_map = { + 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'}, + 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'}, + 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'}, + 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'}, + 'vc_name': {'key': 'vcName', 'type': 'str'}, + 'namespace': {'key': 'namespace', 'type': 'str'}, + 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'}, + 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'}, + } + + def __init__( + self, + *, + relay_connection_string: Optional[str] = None, + service_bus_connection_string: Optional[str] = None, + extension_principal_id: Optional[str] = None, + extension_instance_release_train: Optional[str] = None, + vc_name: Optional[str] = None, + namespace: Optional[str] = "default", + default_instance_type: Optional[str] = None, + instance_types: Optional[Dict[str, "_models.InstanceTypeSchema"]] = None, + **kwargs + ): + """ + :keyword relay_connection_string: Relay connection string. + :paramtype relay_connection_string: str + :keyword service_bus_connection_string: ServiceBus connection string. + :paramtype service_bus_connection_string: str + :keyword extension_principal_id: Extension principal-id. + :paramtype extension_principal_id: str + :keyword extension_instance_release_train: Extension instance release train. + :paramtype extension_instance_release_train: str + :keyword vc_name: VC name. + :paramtype vc_name: str + :keyword namespace: Compute namespace. + :paramtype namespace: str + :keyword default_instance_type: Default instance type. + :paramtype default_instance_type: str + :keyword instance_types: Instance Type Schema. 
+ :paramtype instance_types: dict[str, + ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] + """ + super(KubernetesProperties, self).__init__(**kwargs) + self.relay_connection_string = relay_connection_string + self.service_bus_connection_string = service_bus_connection_string + self.extension_principal_id = extension_principal_id + self.extension_instance_release_train = extension_instance_release_train + self.vc_name = vc_name + self.namespace = namespace + self.default_instance_type = default_instance_type + self.instance_types = instance_types + + +class LabelCategory(msrest.serialization.Model): + """Label category definition. + + :ivar classes: Dictionary of label classes in this category. + :vartype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :ivar display_name: Display name of the label category. + :vartype display_name: str + :ivar multi_select_enabled: Indicates whether it is allowed to select multiple classes in this + category. + :vartype multi_select_enabled: bool + """ + + _attribute_map = { + 'classes': {'key': 'classes', 'type': '{LabelClass}'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'multi_select_enabled': {'key': 'multiSelectEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + classes: Optional[Dict[str, "_models.LabelClass"]] = None, + display_name: Optional[str] = None, + multi_select_enabled: Optional[bool] = False, + **kwargs + ): + """ + :keyword classes: Dictionary of label classes in this category. + :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + :keyword display_name: Display name of the label category. + :paramtype display_name: str + :keyword multi_select_enabled: Indicates whether it is allowed to select multiple classes in + this category. + :paramtype multi_select_enabled: bool + """ + super(LabelCategory, self).__init__(**kwargs) + self.classes = classes + self.display_name = display_name + self.multi_select_enabled = multi_select_enabled + + +class LabelClass(msrest.serialization.Model): + """Label class definition. + + :ivar display_name: Display name of the label class. + :vartype display_name: str + :ivar subclasses: Dictionary of subclasses of the label class. + :vartype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + + _attribute_map = { + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'}, + } + + def __init__( + self, + *, + display_name: Optional[str] = None, + subclasses: Optional[Dict[str, "_models.LabelClass"]] = None, + **kwargs + ): + """ + :keyword display_name: Display name of the label class. + :paramtype display_name: str + :keyword subclasses: Dictionary of subclasses of the label class. + :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + super(LabelClass, self).__init__(**kwargs) + self.display_name = display_name + self.subclasses = subclasses + + +class LabelingDataConfiguration(msrest.serialization.Model): + """Labeling data configuration definition. + + :ivar data_id: Resource Id of the data asset to perform labeling. + :vartype data_id: str + :ivar incremental_data_refresh_enabled: Indicates whether to enable incremental data refresh. 
+ :vartype incremental_data_refresh_enabled: bool + """ + + _attribute_map = { + 'data_id': {'key': 'dataId', 'type': 'str'}, + 'incremental_data_refresh_enabled': {'key': 'incrementalDataRefreshEnabled', 'type': 'bool'}, + } + + def __init__( + self, + *, + data_id: Optional[str] = None, + incremental_data_refresh_enabled: Optional[bool] = False, + **kwargs + ): + """ + :keyword data_id: Resource Id of the data asset to perform labeling. + :paramtype data_id: str + :keyword incremental_data_refresh_enabled: Indicates whether to enable incremental data + refresh. + :paramtype incremental_data_refresh_enabled: bool + """ + super(LabelingDataConfiguration, self).__init__(**kwargs) + self.data_id = data_id + self.incremental_data_refresh_enabled = incremental_data_refresh_enabled + + +class LabelingJob(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'LabelingJobProperties'}, + } + + def __init__( + self, + *, + properties: "_models.LabelingJobProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties + """ + super(LabelingJob, self).__init__(**kwargs) + self.properties = properties + + +class LabelingJobMediaProperties(msrest.serialization.Model): + """Properties of a labeling job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. Known + values are: "Image", "Text". 
+ :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + } + + _subtype_map = { + 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(LabelingJobMediaProperties, self).__init__(**kwargs) + self.media_type = None # type: Optional[str] + + +class LabelingJobImageProperties(LabelingJobMediaProperties): + """Properties of a labeling job for image data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. Known + values are: "Image", "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", "InstanceSegmentation". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__( + self, + *, + annotation_type: Optional[Union[str, "_models.ImageAnnotationType"]] = None, + **kwargs + ): + """ + :keyword annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", "InstanceSegmentation". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + super(LabelingJobImageProperties, self).__init__(**kwargs) + self.media_type = 'Image' # type: str + self.annotation_type = annotation_type + + +class LabelingJobInstructions(msrest.serialization.Model): + """Instructions for labeling job. + + :ivar uri: The link to a page with detailed labeling instructions for labelers. + :vartype uri: str + """ + + _attribute_map = { + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__( + self, + *, + uri: Optional[str] = None, + **kwargs + ): + """ + :keyword uri: The link to a page with detailed labeling instructions for labelers. + :paramtype uri: str + """ + super(LabelingJobInstructions, self).__init__(**kwargs) + self.uri = uri + + +class LabelingJobProperties(JobBaseProperties): + """Labeling job definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. 
+ :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar created_date_time: Created time of the job in UTC timezone. + :vartype created_date_time: ~datetime.datetime + :ivar data_configuration: Configuration of data used in the job. + :vartype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :ivar job_instructions: Labeling instructions of the job. + :vartype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :ivar label_categories: Label categories of the job. + :vartype label_categories: dict[str, ~azure.mgmt.machinelearningservices.models.LabelCategory] + :ivar labeling_job_media_properties: Media type specific properties in the job. + :vartype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :ivar ml_assist_configuration: Configuration of MLAssist feature in the job. + :vartype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + :ivar progress_metrics: Progress metrics of the job. + :vartype progress_metrics: ~azure.mgmt.machinelearningservices.models.ProgressMetrics + :ivar project_id: Internal id of the job(Previously called project). + :vartype project_id: str + :ivar provisioning_state: Specifies the labeling job provisioning state. Known values are: + "Succeeded", "Failed", "Canceled", "InProgress". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :ivar status_messages: Status messages of the job. 
+ :vartype status_messages: list[~azure.mgmt.machinelearningservices.models.StatusMessage] + """ + + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + 'created_date_time': {'readonly': True}, + 'progress_metrics': {'readonly': True}, + 'project_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'status_messages': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, + 'data_configuration': {'key': 'dataConfiguration', 'type': 'LabelingDataConfiguration'}, + 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, + 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, + 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, + 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, + 'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'}, + 'project_id': {'key': 'projectId', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + data_configuration: Optional["_models.LabelingDataConfiguration"] = None, + job_instructions: Optional["_models.LabelingJobInstructions"] = None, + label_categories: Optional[Dict[str, "_models.LabelCategory"]] = None, + labeling_job_media_properties: Optional["_models.LabelingJobMediaProperties"] = None, + ml_assist_configuration: Optional["_models.MLAssistConfiguration"] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. 
+ :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword data_configuration: Configuration of data used in the job. + :paramtype data_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration + :keyword job_instructions: Labeling instructions of the job. + :paramtype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :keyword label_categories: Label categories of the job. + :paramtype label_categories: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelCategory] + :keyword labeling_job_media_properties: Media type specific properties in the job. + :paramtype labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :keyword ml_assist_configuration: Configuration of MLAssist feature in the job. + :paramtype ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + """ + super(LabelingJobProperties, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'Labeling' # type: str + self.created_date_time = None + self.data_configuration = data_configuration + self.job_instructions = job_instructions + self.label_categories = label_categories + self.labeling_job_media_properties = labeling_job_media_properties + self.ml_assist_configuration = ml_assist_configuration + self.progress_metrics = None + self.project_id = None + self.provisioning_state = None + self.status_messages = None + + +class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of LabelingJob entities. + + :ivar next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type LabelingJob. + :vartype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[LabelingJob]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.LabelingJob"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type LabelingJob. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] + """ + super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class LabelingJobTextProperties(LabelingJobMediaProperties): + """Properties of a labeling job for text data. + + All required parameters must be populated in order to send to Azure. + + :ivar media_type: Required. 
[Required] Media type of the job.Constant filled by server. Known + values are: "Image", "Text". + :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType + :ivar annotation_type: Annotation type of text labeling job. Known values are: + "Classification", "NamedEntityRecognition". + :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__( + self, + *, + annotation_type: Optional[Union[str, "_models.TextAnnotationType"]] = None, + **kwargs + ): + """ + :keyword annotation_type: Annotation type of text labeling job. Known values are: + "Classification", "NamedEntityRecognition". + :paramtype annotation_type: str or + ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + super(LabelingJobTextProperties, self).__init__(**kwargs) + self.media_type = 'Text' # type: str + self.annotation_type = annotation_type + + +class ListAmlUserFeatureResult(msrest.serialization.Model): + """The List Aml user feature operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML user facing features. + :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :ivar next_link: The URI to fetch the next page of AML user features information. Call + ListNext() with this to fetch the next page of AML user features information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListAmlUserFeatureResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListNotebookKeysResult(msrest.serialization.Model): + """ListNotebookKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar primary_access_key: + :vartype primary_access_key: str + :ivar secondary_access_key: + :vartype secondary_access_key: str + """ + + _validation = { + 'primary_access_key': {'readonly': True}, + 'secondary_access_key': {'readonly': True}, + } + + _attribute_map = { + 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, + 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListNotebookKeysResult, self).__init__(**kwargs) + self.primary_access_key = None + self.secondary_access_key = None + + +class ListStorageAccountKeysResult(msrest.serialization.Model): + """ListStorageAccountKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListStorageAccountKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + + +class ListUsagesResult(msrest.serialization.Model): + """The List Usages operation response. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML resource usages. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] + :ivar next_link: The URI to fetch the next page of AML resource usage information. Call + ListNext() with this to fetch the next page of AML resource usage information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Usage]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListUsagesResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListWorkspaceKeysResult(msrest.serialization.Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :ivar notebook_access_keys: + :vartype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + 'user_storage_resource_id': {'readonly': True}, + 'app_insights_instrumentation_key': {'readonly': True}, + 'container_registry_credentials': {'readonly': True}, + 'notebook_access_keys': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, + 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, + 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListWorkspaceKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = None + + +class ListWorkspaceQuotas(msrest.serialization.Model): + """The List WorkspaceQuotasByVMFamily operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of Workspace Quotas by VM Family. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. + Call ListNext() with this to fetch the next page of Workspace Quota information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ResourceQuota]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ListWorkspaceQuotas, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class LiteralJobInput(JobInput): + """Literal input type. 
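+
+ A minimal illustrative sketch (not generated content; the example values are assumptions):
+ a plain string input could be expressed as
+ ``LiteralJobInput(value="42", description="number of epochs")``.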
+ + All required parameters must be populated in order to send to Azure. + + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar value: Required. [Required] Literal value for the input. + :vartype value: str + """ + + _validation = { + 'job_input_type': {'required': True}, + 'value': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + value: str, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description for the input. + :paramtype description: str + :keyword value: Required. [Required] Literal value for the input. + :paramtype value: str + """ + super(LiteralJobInput, self).__init__(description=description, **kwargs) + self.job_input_type = 'literal' # type: str + self.value = value + + +class ManagedIdentity(IdentityConfiguration): + """Managed identity configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant + filled by server. Known values are: "Managed", "AMLToken", "UserIdentity". + :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType + :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not + set this field. + :vartype client_id: str + :ivar object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not + set this field. + :vartype object_id: str + :ivar resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned, + do not set this field. + :vartype resource_id: str + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: Optional[str] = None, + object_id: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + """ + :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do + not set this field. + :paramtype client_id: str + :keyword object_id: Specifies a user-assigned identity by object ID. For system-assigned, do + not set this field. + :paramtype object_id: str + :keyword resource_id: Specifies a user-assigned identity by ARM resource ID. For + system-assigned, do not set this field. + :paramtype resource_id: str + """ + super(ManagedIdentity, self).__init__(**kwargs) + self.identity_type = 'Managed' # type: str + self.client_id = client_id + self.object_id = object_id + self.resource_id = resource_id + + +class WorkspaceConnectionPropertiesV2(msrest.serialization.Model): + """WorkspaceConnectionPropertiesV2. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: ManagedIdentityAuthTypeWorkspaceConnectionProperties, NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, SASAuthTypeWorkspaceConnectionProperties, UsernamePasswordAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + 'auth_type': {'required': True}, + } + + _attribute_map = { + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + } + + _subtype_map = { + 'auth_type': {'ManagedIdentity': 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', 'None': 'NoneAuthTypeWorkspaceConnectionProperties', 'PAT': 'PATAuthTypeWorkspaceConnectionProperties', 'SAS': 'SASAuthTypeWorkspaceConnectionProperties', 'UsernamePassword': 'UsernamePasswordAuthTypeWorkspaceConnectionProperties'} + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + super(WorkspaceConnectionPropertiesV2, self).__init__(**kwargs) + self.auth_type = None # type: Optional[str] + self.category = category + self.target = target + self.value = value + self.value_format = value_format + + +class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """ManagedIdentityAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". 
+ :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + """ + + _validation = { + 'auth_type': {'required': True}, + } + + _attribute_map = { + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionManagedIdentity'}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionManagedIdentity"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity + """ + super(ManagedIdentityAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = 'ManagedIdentity' # type: str + self.credentials = credentials + + +class ManagedOnlineDeployment(OnlineDeploymentProperties): + """Properties specific to a ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar code_configuration: Code configuration for the endpoint deployment. + :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :ivar description: Description of the endpoint deployment. + :vartype description: str + :ivar environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :vartype environment_id: str + :ivar environment_variables: Environment variables configuration for the deployment. + :vartype environment_variables: dict[str, str] + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar app_insights_enabled: If true, enables Application Insights logging. + :vartype app_insights_enabled: bool + :ivar egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". 
+ :vartype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant + filled by server. Known values are: "Managed", "Kubernetes", "AzureMLCompute". + :vartype endpoint_compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :ivar instance_type: Compute instance type. + :vartype instance_type: str + :ivar liveness_probe: Liveness probe monitors the health of the container regularly. + :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar model: The URI path to the model. + :vartype model: str + :ivar model_mount_path: The path to mount the model in custom container. + :vartype model_mount_path: str + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :ivar request_settings: Request settings for the deployment. + :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :ivar scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'str'}, + 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + } + + def __init__( + self, + *, + code_configuration: Optional["_models.CodeConfiguration"] = None, + description: Optional[str] = None, + environment_id: Optional[str] = None, + environment_variables: Optional[Dict[str, str]] = None, + properties: Optional[Dict[str, str]] = None, + app_insights_enabled: Optional[bool] = False, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, + instance_type: Optional[str] = 
None, + liveness_probe: Optional["_models.ProbeSettings"] = None, + model: Optional[str] = None, + model_mount_path: Optional[str] = None, + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs + ): + """ + :keyword code_configuration: Code configuration for the endpoint deployment. + :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :keyword description: Description of the endpoint deployment. + :paramtype description: str + :keyword environment_id: ARM resource ID of the environment specification for the endpoint + deployment. + :paramtype environment_id: str + :keyword environment_variables: Environment variables configuration for the deployment. + :paramtype environment_variables: dict[str, str] + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword app_insights_enabled: If true, enables Application Insights logging. + :paramtype app_insights_enabled: bool + :keyword egress_public_network_access: If Enabled, allow egress public network access. If + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled", + "Disabled". + :paramtype egress_public_network_access: str or + ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType + :keyword instance_type: Compute instance type. + :paramtype instance_type: str + :keyword liveness_probe: Liveness probe monitors the health of the container regularly. + :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword model: The URI path to the model. + :paramtype model: str + :keyword model_mount_path: The path to mount the model in custom container. + :paramtype model_mount_path: str + :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. + The properties and defaults are the same as liveness probe. + :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings + :keyword request_settings: Request settings for the deployment. + :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :keyword scale_settings: Scale settings for the deployment. + If it is null or not provided, + it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment + and to DefaultScaleSettings for ManagedOnlineDeployment. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + """ + super(ManagedOnlineDeployment, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, app_insights_enabled=app_insights_enabled, egress_public_network_access=egress_public_network_access, instance_type=instance_type, liveness_probe=liveness_probe, model=model, model_mount_path=model_mount_path, readiness_probe=readiness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.endpoint_compute_type = 'Managed' # type: str + + +class ManagedServiceIdentity(msrest.serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). + + Variables are only populated by the server, and will be ignored when sending a request. 
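+
+ A minimal illustrative sketch (not generated content): a system-assigned identity could be
+ requested with ``ManagedServiceIdentity(type="SystemAssigned")``; ``principal_id`` and
+ ``tenant_id`` are then populated by the server.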
+ + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The service principal ID of the system assigned identity. This property + will only be provided for a system assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be + provided for a system assigned identity. + :vartype tenant_id: str + :ivar type: Required. Type of managed service identity (where both SystemAssigned and + UserAssigned types are allowed). Known values are: "None", "SystemAssigned", "UserAssigned", + "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + *, + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + **kwargs + ): + """ + :keyword type: Required. Type of managed service identity (where both SystemAssigned and + UserAssigned types are allowed). Known values are: "None", "SystemAssigned", "UserAssigned", + "SystemAssigned,UserAssigned". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :paramtype user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + """ + super(ManagedServiceIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class MedianStoppingPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on running averages of the primary metric of all runs. + + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. + Known values are: "Bandit", "MedianStopping", "TruncationSelection". 
+ :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + """ + + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + } + + def __init__( + self, + *, + delay_evaluation: Optional[int] = 0, + evaluation_interval: Optional[int] = 0, + **kwargs + ): + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + """ + super(MedianStoppingPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = 'MedianStopping' # type: str + + +class MLAssistConfiguration(msrest.serialization.Model): + """Labeling MLAssist configuration definition. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: MLAssistConfigurationDisabled, MLAssistConfigurationEnabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant + filled by server. Known values are: "Enabled", "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + """ + + _validation = { + 'ml_assist': {'required': True}, + } + + _attribute_map = { + 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + } + + _subtype_map = { + 'ml_assist': {'Disabled': 'MLAssistConfigurationDisabled', 'Enabled': 'MLAssistConfigurationEnabled'} + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(MLAssistConfiguration, self).__init__(**kwargs) + self.ml_assist = None # type: Optional[str] + + +class MLAssistConfigurationDisabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is disabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant + filled by server. Known values are: "Enabled", "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + """ + + _validation = { + 'ml_assist': {'required': True}, + } + + _attribute_map = { + 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(MLAssistConfigurationDisabled, self).__init__(**kwargs) + self.ml_assist = 'Disabled' # type: str + + +class MLAssistConfigurationEnabled(MLAssistConfiguration): + """Labeling MLAssist configuration definition when MLAssist is enabled. + + All required parameters must be populated in order to send to Azure. + + :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant + filled by server. Known values are: "Enabled", "Disabled". + :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType + :ivar inferencing_compute_binding: Required. [Required] AML compute binding used in + inferencing. + :vartype inferencing_compute_binding: str + :ivar training_compute_binding: Required. [Required] AML compute binding used in training. 
+ :vartype training_compute_binding: str + """ + + _validation = { + 'ml_assist': {'required': True}, + 'inferencing_compute_binding': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'training_compute_binding': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'str'}, + 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'str'}, + } + + def __init__( + self, + *, + inferencing_compute_binding: str, + training_compute_binding: str, + **kwargs + ): + """ + :keyword inferencing_compute_binding: Required. [Required] AML compute binding used in + inferencing. + :paramtype inferencing_compute_binding: str + :keyword training_compute_binding: Required. [Required] AML compute binding used in training. + :paramtype training_compute_binding: str + """ + super(MLAssistConfigurationEnabled, self).__init__(**kwargs) + self.ml_assist = 'Enabled' # type: str + self.inferencing_compute_binding = inferencing_compute_binding + self.training_compute_binding = training_compute_binding + + +class MLFlowModelJobInput(JobInput, AssetJobInput): + """MLFlowModelJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + } + + def __init__( + self, + *, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(MLFlowModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'mlflow_model' # type: str + self.description = description + + +class MLFlowModelJobOutput(JobOutput, AssetJobOutput): + """MLFlowModelJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. 
+ :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + """ + + _validation = { + 'job_output_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(MLFlowModelJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'mlflow_model' # type: str + self.description = description + + +class MLTableData(DataVersionBaseProperties): + """MLTable data definition. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. + Known values are: "uri_file", "uri_folder", "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :vartype data_uri: str + :ivar referenced_uris: Uris referenced in the MLTable definition (required for lineage). 
+ :vartype referenced_uris: list[str] + """ + + _validation = { + 'data_type': {'required': True}, + 'data_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'data_uri': {'key': 'dataUri', 'type': 'str'}, + 'referenced_uris': {'key': 'referencedUris', 'type': '[str]'}, + } + + def __init__( + self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + referenced_uris: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :paramtype data_uri: str + :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). + :paramtype referenced_uris: list[str] + """ + super(MLTableData, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, **kwargs) + self.data_type = 'mltable' # type: str + self.referenced_uris = referenced_uris + + +class MLTableJobInput(JobInput, AssetJobInput): + """MLTableJobInput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + """ + + _validation = { + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + } + + def __init__( + self, + *, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Input Asset Delivery Mode. 
Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(MLTableJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'mltable' # type: str + self.description = description + + +class MLTableJobOutput(JobOutput, AssetJobOutput): + """MLTableJobOutput. + + All required parameters must be populated in order to send to Azure. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType + """ + + _validation = { + 'job_output_type': {'required': True}, + } + + _attribute_map = { + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + } + + def __init__( + self, + *, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(MLTableJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'mltable' # type: str + self.description = description + + +class ModelContainer(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'ModelContainerProperties'}, + } + + def __init__( + self, + *, + properties: "_models.ModelContainerProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties + """ + super(ModelContainer, self).__init__(**kwargs) + self.properties = properties + + +class ModelContainerProperties(AssetContainer): + """ModelContainerProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar latest_version: The latest version inside this container. + :vartype latest_version: str + :ivar next_version: The next auto incremental version. + :vartype next_version: str + """ + + _validation = { + 'latest_version': {'readonly': True}, + 'next_version': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'latest_version': {'key': 'latestVersion', 'type': 'str'}, + 'next_version': {'key': 'nextVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_archived: Optional[bool] = False, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + """ + super(ModelContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + + +class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of ModelContainer entities. + + :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ModelContainer. 
+ :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[ModelContainer]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.ModelContainer"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelContainer. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] + """ + super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ModelVersion(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'ModelVersionProperties'}, + } + + def __init__( + self, + *, + properties: "_models.ModelVersionProperties", + **kwargs + ): + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties + """ + super(ModelVersion, self).__init__(**kwargs) + self.properties = properties + + +class ModelVersionProperties(AssetBase): + """Model asset version details. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar flavors: Mapping of model flavors to their properties. + :vartype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :ivar job_name: Name of the training job which produced this model. + :vartype job_name: str + :ivar model_type: The storage format for this entity. Used for NCD. 
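# Illustrative usage sketch (not generated code): registering a model container,
# the name-level envelope defined above. The commented-out call assumes the
# usual generated ARM operation shape (resource group, workspace, name, body)
# on an AzureMachineLearningWorkspaces client; verify the exact signature in
# the operations module before relying on it. Values are illustrative.
from azure.mgmt.machinelearningservices import models as ml_models

container = ml_models.ModelContainer(
    properties=ml_models.ModelContainerProperties(
        description="Credit-scoring models",   # illustrative values
        tags={"team": "risk"},
    )
)
# Hypothetical call, assuming `client` is an AzureMachineLearningWorkspaces
# instance and the group follows the usual create_or_update pattern:
# client.model_containers.create_or_update(
#     "my-resource-group", "my-workspace", "credit-scoring", container)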
+ :vartype model_type: str + :ivar model_uri: The URI path to the model contents. + :vartype model_uri: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, + 'job_name': {'key': 'jobName', 'type': 'str'}, + 'model_type': {'key': 'modelType', 'type': 'str'}, + 'model_uri': {'key': 'modelUri', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, + flavors: Optional[Dict[str, "_models.FlavorData"]] = None, + job_name: Optional[str] = None, + model_type: Optional[str] = None, + model_uri: Optional[str] = None, + **kwargs + ): + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword flavors: Mapping of model flavors to their properties. + :paramtype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] + :keyword job_name: Name of the training job which produced this model. + :paramtype job_name: str + :keyword model_type: The storage format for this entity. Used for NCD. + :paramtype model_type: str + :keyword model_uri: The URI path to the model contents. + :paramtype model_uri: str + """ + super(ModelVersionProperties, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + self.flavors = flavors + self.job_name = job_name + self.model_type = model_type + self.model_uri = model_uri + + +class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of ModelVersion entities. + + :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type ModelVersion. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[ModelVersion]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.ModelVersion"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type ModelVersion. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] + """ + super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class Mpi(DistributionConfiguration): + """MPI distribution configuration. + + All required parameters must be populated in order to send to Azure. 
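# Illustrative usage sketch (not generated code): a model version payload built
# from the classes above, plus direct use of the paginated-result model. The
# generated list operations normally return a pager that follows next_link for
# you, so constructing ModelVersionResourceArmPaginatedResult by hand is only
# shown to make its shape concrete; all values are illustrative assumptions.
from azure.mgmt.machinelearningservices import models as ml_models

version = ml_models.ModelVersion(
    properties=ml_models.ModelVersionProperties(
        model_uri="azureml://datastores/workspaceblobstore/paths/model",  # hypothetical path
        model_type="mlflow_model",    # storage format; free-form string per the docstring
        job_name="training-job-001",  # optional lineage back to the producing job
        tags={"stage": "candidate"},
    )
)

page = ml_models.ModelVersionResourceArmPaginatedResult(value=[version], next_link=None)
for item in page.value or []:
    print(item.properties.model_uri)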
+ + :ivar distribution_type: Required. [Required] Specifies the type of distribution + framework.Constant filled by server. Known values are: "PyTorch", "TensorFlow", "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar process_count_per_instance: Number of processes per MPI node. + :vartype process_count_per_instance: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + } + + def __init__( + self, + *, + process_count_per_instance: Optional[int] = None, + **kwargs + ): + """ + :keyword process_count_per_instance: Number of processes per MPI node. + :paramtype process_count_per_instance: int + """ + super(Mpi, self).__init__(**kwargs) + self.distribution_type = 'Mpi' # type: str + self.process_count_per_instance = process_count_per_instance + + +class NlpVertical(msrest.serialization.Model): + """Abstract class for NLP related AutoML tasks. +NLP - Natural Language Processing. + + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + + _attribute_map = { + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + } + + def __init__( + self, + *, + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + **kwargs + ): + """ + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super(NlpVertical, self).__init__(**kwargs) + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.validation_data = validation_data + + +class NlpVerticalFeaturizationSettings(FeaturizationSettings): + """NlpVerticalFeaturizationSettings. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + """ + + _attribute_map = { + 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + } + + def __init__( + self, + *, + dataset_language: Optional[str] = None, + **kwargs + ): + """ + :keyword dataset_language: Dataset language, useful for the text data. 
+ :paramtype dataset_language: str + """ + super(NlpVerticalFeaturizationSettings, self).__init__(dataset_language=dataset_language, **kwargs) + + +class NlpVerticalLimitSettings(msrest.serialization.Model): + """Job execution constraints. + + :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. + :vartype max_concurrent_trials: int + :ivar max_trials: Number of AutoML iterations. + :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_trials': {'key': 'maxTrials', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + max_concurrent_trials: Optional[int] = 1, + max_trials: Optional[int] = 1, + timeout: Optional[datetime.timedelta] = None, + **kwargs + ): + """ + :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. + :paramtype max_concurrent_trials: int + :keyword max_trials: Number of AutoML iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + """ + super(NlpVerticalLimitSettings, self).__init__(**kwargs) + self.max_concurrent_trials = max_concurrent_trials + self.max_trials = max_trials + self.timeout = timeout + + +class NodeStateCounts(msrest.serialization.Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Number of compute nodes which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, + 'preempted_node_count': {'readonly': True}, + } + + _attribute_map = { + 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, + 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, + 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, + 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, + 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, + 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(NodeStateCounts, self).__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """NoneAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Required. 
Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + 'auth_type': {'required': True}, + } + + _attribute_map = { + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + super(NoneAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = 'None' # type: str + + +class NoneDatastoreCredentials(DatastoreCredentials): + """Empty/none datastore credentials. + + All required parameters must be populated in order to send to Azure. + + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(NoneDatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = 'None' # type: str + + +class NotebookAccessTokenResult(msrest.serialization.Model): + """NotebookAccessTokenResult. + + Variables are only populated by the server, and will be ignored when sending a request. 
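# Illustrative usage sketch (not generated code): the two "None" credential
# flavours defined above — a workspace connection that carries no secret, and
# an anonymous datastore credential. Category and target are illustrative
# assumptions drawn from the known values listed in the docstrings.
from azure.mgmt.machinelearningservices import models as ml_models

anonymous_feed = ml_models.NoneAuthTypeWorkspaceConnectionProperties(
    category="PythonFeed",              # known values listed above
    target="https://pypi.org/simple",   # hypothetical target
)
# auth_type is a server-filled constant and is fixed to 'None' by the model.

open_datastore_credentials = ml_models.NoneDatastoreCredentials()
# Likewise, credentials_type is fixed to 'None' by the model itself.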
+ + :ivar notebook_resource_id: + :vartype notebook_resource_id: str + :ivar host_name: + :vartype host_name: str + :ivar public_dns: + :vartype public_dns: str + :ivar access_token: + :vartype access_token: str + :ivar token_type: + :vartype token_type: str + :ivar expires_in: + :vartype expires_in: int + :ivar refresh_token: + :vartype refresh_token: str + :ivar scope: + :vartype scope: str + """ + + _validation = { + 'notebook_resource_id': {'readonly': True}, + 'host_name': {'readonly': True}, + 'public_dns': {'readonly': True}, + 'access_token': {'readonly': True}, + 'token_type': {'readonly': True}, + 'expires_in': {'readonly': True}, + 'refresh_token': {'readonly': True}, + 'scope': {'readonly': True}, + } + + _attribute_map = { + 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + 'public_dns': {'key': 'publicDns', 'type': 'str'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'token_type': {'key': 'tokenType', 'type': 'str'}, + 'expires_in': {'key': 'expiresIn', 'type': 'int'}, + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'scope': {'key': 'scope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(NotebookAccessTokenResult, self).__init__(**kwargs) + self.notebook_resource_id = None + self.host_name = None + self.public_dns = None + self.access_token = None + self.token_type = None + self.expires_in = None + self.refresh_token = None + self.scope = None + + +class NotebookPreparationError(msrest.serialization.Model): + """NotebookPreparationError. + + :ivar error_message: + :vartype error_message: str + :ivar status_code: + :vartype status_code: int + """ + + _attribute_map = { + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'status_code': {'key': 'statusCode', 'type': 'int'}, + } + + def __init__( + self, + *, + error_message: Optional[str] = None, + status_code: Optional[int] = None, + **kwargs + ): + """ + :keyword error_message: + :paramtype error_message: str + :keyword status_code: + :paramtype status_code: int + """ + super(NotebookPreparationError, self).__init__(**kwargs) + self.error_message = error_message + self.status_code = status_code + + +class NotebookResourceInfo(msrest.serialization.Model): + """NotebookResourceInfo. + + :ivar fqdn: + :vartype fqdn: str + :ivar resource_id: the data plane resourceId that used to initialize notebook component. + :vartype resource_id: str + :ivar notebook_preparation_error: The error that occurs when preparing notebook. + :vartype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + + _attribute_map = { + 'fqdn': {'key': 'fqdn', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + } + + def __init__( + self, + *, + fqdn: Optional[str] = None, + resource_id: Optional[str] = None, + notebook_preparation_error: Optional["_models.NotebookPreparationError"] = None, + **kwargs + ): + """ + :keyword fqdn: + :paramtype fqdn: str + :keyword resource_id: the data plane resourceId that used to initialize notebook component. + :paramtype resource_id: str + :keyword notebook_preparation_error: The error that occurs when preparing notebook. 
+ :paramtype notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + super(NotebookResourceInfo, self).__init__(**kwargs) + self.fqdn = fqdn + self.resource_id = resource_id + self.notebook_preparation_error = notebook_preparation_error + + +class Objective(msrest.serialization.Model): + """Optimization objective. + + All required parameters must be populated in order to send to Azure. + + :ivar goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. + Known values are: "Minimize", "Maximize". + :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :ivar primary_metric: Required. [Required] Name of the metric to optimize. + :vartype primary_metric: str + """ + + _validation = { + 'goal': {'required': True}, + 'primary_metric': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'goal': {'key': 'goal', 'type': 'str'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__( + self, + *, + goal: Union[str, "_models.Goal"], + primary_metric: str, + **kwargs + ): + """ + :keyword goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. + Known values are: "Minimize", "Maximize". + :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :keyword primary_metric: Required. [Required] Name of the metric to optimize. + :paramtype primary_metric: str + """ + super(Objective, self).__init__(**kwargs) + self.goal = goal + self.primary_metric = primary_metric + + +class OnlineDeployment(TrackedResource): + """OnlineDeployment. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: Required. The geo-location where the resource lives. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineDeploymentProperties'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.OnlineDeploymentProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: Required. The geo-location where the resource lives. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super(OnlineDeployment, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of OnlineDeployment entities. + + :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineDeployment. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[OnlineDeployment]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.OnlineDeployment"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are + no additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineDeployment. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + """ + super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineEndpoint(TrackedResource): + """OnlineEndpoint. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. 
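# Illustrative usage sketch (not generated code): the tracked-resource envelope
# used by OnlineDeployment above. OnlineDeploymentProperties is a base type;
# the concrete subclass used here (ManagedOnlineDeployment), the Sku model and
# every value are assumptions for illustration — they live elsewhere in this
# package, not in the lines shown in this hunk.
from azure.mgmt.machinelearningservices import models as ml_models

deployment = ml_models.OnlineDeployment(
    location="westus2",                              # required geo-location
    tags={"env": "dev"},
    sku=ml_models.Sku(name="Default", capacity=1),   # Sku defined elsewhere in models
    properties=ml_models.ManagedOnlineDeployment(    # assumed concrete properties type
        instance_type="Standard_DS3_v2",             # hypothetical parameter
    ),
)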
+ + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar location: Required. The geo-location where the resource lives. + :vartype location: str + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :vartype kind: str + :ivar properties: Required. [Required] Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineEndpointProperties'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + } + + def __init__( + self, + *, + location: str, + properties: "_models.OnlineEndpointProperties", + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + kind: Optional[str] = None, + sku: Optional["_models.Sku"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword location: Required. The geo-location where the resource lives. + :paramtype location: str + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for + resources of the same type. + :paramtype kind: str + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + """ + super(OnlineEndpoint, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.sku = sku + + +class OnlineEndpointProperties(EndpointPropertiesBase): + """Online endpoint configuration. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for + Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Known values are: "AMLToken", "Key", "AADToken". + :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :ivar description: Description of the inference endpoint. + :vartype description: str + :ivar keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :ivar properties: Property dictionary. Properties can be added, but not removed or altered. + :vartype properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :ivar compute: ARM resource ID of the compute if it exists. + optional. + :vartype compute: str + :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :vartype mirror_traffic: dict[str, int] + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled", "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values + need to sum to 100. + :vartype traffic: dict[str, int] + """ + + _validation = { + 'auth_mode': {'required': True}, + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'auth_mode': {'key': 'authMode', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'str'}, + 'mirror_traffic': {'key': 'mirrorTraffic', 'type': '{int}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'public_network_access': {'key': 'publicNetworkAccess', 'type': 'str'}, + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__( + self, + *, + auth_mode: Union[str, "_models.EndpointAuthMode"], + description: Optional[str] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, + properties: Optional[Dict[str, str]] = None, + compute: Optional[str] = None, + mirror_traffic: Optional[Dict[str, int]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + traffic: Optional[Dict[str, int]] = None, + **kwargs + ): + """ + :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' + for Azure Machine Learning token-based authentication. 
'Key' doesn't expire but 'AMLToken' + does. Known values are: "AMLToken", "Key", "AADToken". + :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :keyword description: Description of the inference endpoint. + :paramtype description: str + :keyword keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be retrieved using the + ListKeys API. + :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :keyword properties: Property dictionary. Properties can be added, but not removed or altered. + :paramtype properties: dict[str, str] + :keyword compute: ARM resource ID of the compute if it exists. + optional. + :paramtype compute: str + :keyword mirror_traffic: Percentage of traffic to be mirrored to each deployment without using + returned scoring. Traffic values need to sum to utmost 50. + :paramtype mirror_traffic: dict[str, int] + :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access + when Private Link is enabled. Known values are: "Enabled", "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic + values need to sum to 100. + :paramtype traffic: dict[str, int] + """ + super(OnlineEndpointProperties, self).__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + self.compute = compute + self.mirror_traffic = mirror_traffic + self.provisioning_state = None + self.public_network_access = public_network_access + self.traffic = traffic + + +class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of OnlineEndpoint entities. + + :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type OnlineEndpoint. + :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[OnlineEndpoint]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["_models.OnlineEndpoint"]] = None, + **kwargs + ): + """ + :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type OnlineEndpoint. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + """ + super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class OnlineRequestSettings(msrest.serialization.Model): + """Online deployment scoring requests configuration. + + :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node + allowed per deployment. Defaults to 1. + :vartype max_concurrent_requests_per_instance: int + :ivar max_queue_wait: The maximum amount of time a request will stay in the queue in ISO 8601 + format. + Defaults to 500ms. + :vartype max_queue_wait: ~datetime.timedelta + :ivar request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. 
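# Illustrative usage sketch (not generated code): tying OnlineEndpoint and
# OnlineEndpointProperties together. The ManagedServiceIdentity model and the
# long-running call in the trailing comment are assumptions; auth_mode,
# traffic, mirror_traffic and public_network_access follow the field
# descriptions above, with illustrative values.
from azure.mgmt.machinelearningservices import models as ml_models

endpoint = ml_models.OnlineEndpoint(
    location="westus2",
    identity=ml_models.ManagedServiceIdentity(type="SystemAssigned"),  # defined elsewhere in models
    properties=ml_models.OnlineEndpointProperties(
        auth_mode="Key",                  # 'Key' does not expire, unlike 'AMLToken'
        public_network_access="Enabled",
        traffic={"blue": 100},            # per-deployment split; must sum to 100
        mirror_traffic={"shadow": 10},    # mirrored traffic; must sum to at most 50
    ),
)
# Hypothetical submission, assuming the usual generated LRO shape:
# client.online_endpoints.begin_create_or_update(
#     "my-resource-group", "my-workspace", "my-endpoint", endpoint).result()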
+ :vartype request_timeout: ~datetime.timedelta + """ + + _attribute_map = { + 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, + 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + max_concurrent_requests_per_instance: Optional[int] = 1, + max_queue_wait: Optional[datetime.timedelta] = "PT0.5S", + request_timeout: Optional[datetime.timedelta] = "PT5S", + **kwargs + ): + """ + :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per + node allowed per deployment. Defaults to 1. + :paramtype max_concurrent_requests_per_instance: int + :keyword max_queue_wait: The maximum amount of time a request will stay in the queue in ISO + 8601 format. + Defaults to 500ms. + :paramtype max_queue_wait: ~datetime.timedelta + :keyword request_timeout: The scoring timeout in ISO 8601 format. + Defaults to 5000ms. + :paramtype request_timeout: ~datetime.timedelta + """ + super(OnlineRequestSettings, self).__init__(**kwargs) + self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance + self.max_queue_wait = max_queue_wait + self.request_timeout = request_timeout + + +class OutputPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a job output. + + All required parameters must be populated in order to send to Azure. + + :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant + filled by server. Known values are: "Id", "DataPath", "OutputPath". + :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType + :ivar job_id: ARM resource ID of the job. + :vartype job_id: str + :ivar path: The path of the file/directory in the job output. + :vartype path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + job_id: Optional[str] = None, + path: Optional[str] = None, + **kwargs + ): + """ + :keyword job_id: ARM resource ID of the job. + :paramtype job_id: str + :keyword path: The path of the file/directory in the job output. + :paramtype path: str + """ + super(OutputPathAssetReference, self).__init__(**kwargs) + self.reference_type = 'OutputPath' # type: str + self.job_id = job_id + self.path = path + + +class PaginatedComputeResourcesList(msrest.serialization.Model): + """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. + + :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope. + :vartype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] + :ivar next_link: A continuation link (absolute URI) to the next page of results in the list. + :vartype next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ComputeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.ComputeResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + """ + :keyword value: An array of Machine Learning compute objects wrapped in ARM resource envelope. 
+ :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] + :keyword next_link: A continuation link (absolute URI) to the next page of results in the list. + :paramtype next_link: str + """ + super(PaginatedComputeResourcesList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PartialBatchDeployment(msrest.serialization.Model): + """Mutable batch inference settings per deployment. + + :ivar description: Description of the endpoint deployment. + :vartype description: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + **kwargs + ): + """ + :keyword description: Description of the endpoint deployment. + :paramtype description: str + """ + super(PartialBatchDeployment, self).__init__(**kwargs) + self.description = description + + +class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(msrest.serialization.Model): + """Strictly used in update requests. + + :ivar properties: Additional attributes of the entity. + :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + properties: Optional["_models.PartialBatchDeployment"] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword properties: Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + """ + super(PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, self).__init__(**kwargs) + self.properties = properties + self.tags = tags + + +class PartialManagedServiceIdentity(msrest.serialization.Model): + """Managed service identity (system assigned and/or user assigned identities). + + :ivar type: Managed service identity (system assigned and/or user assigned identities). Known + values are: "None", "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :ivar user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :vartype user_assigned_identities: dict[str, any] + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.ManagedServiceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, Any]] = None, + **kwargs + ): + """ + :keyword type: Managed service identity (system assigned and/or user assigned identities). + Known values are: "None", "SystemAssigned", "UserAssigned", "SystemAssigned,UserAssigned". 
+ :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType + :keyword user_assigned_identities: The set of user assigned identities associated with the + resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. + The dictionary values can be empty objects ({}) in requests. + :paramtype user_assigned_identities: dict[str, any] + """ + super(PartialManagedServiceIdentity, self).__init__(**kwargs) + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class PartialMinimalTrackedResource(msrest.serialization.Model): + """Strictly used in update requests. + + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + """ + super(PartialMinimalTrackedResource, self).__init__(**kwargs) + self.tags = tags + + +class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): + """Strictly used in update requests. + + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar identity: Managed service identity (system assigned and/or user assigned identities). + :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword identity: Managed service identity (system assigned and/or user assigned identities). + :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity + """ + super(PartialMinimalTrackedResourceWithIdentity, self).__init__(tags=tags, **kwargs) + self.identity = identity + + +class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): + """Strictly used in update requests. + + :ivar tags: A set of tags. Resource tags. + :vartype tags: dict[str, str] + :ivar sku: Sku details required for ARM contract for Autoscaling. + :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'PartialSku'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.PartialSku"] = None, + **kwargs + ): + """ + :keyword tags: A set of tags. Resource tags. + :paramtype tags: dict[str, str] + :keyword sku: Sku details required for ARM contract for Autoscaling. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku + """ + super(PartialMinimalTrackedResourceWithSku, self).__init__(tags=tags, **kwargs) + self.sku = sku + + +class PartialSku(msrest.serialization.Model): + """Common SKU definition. + + :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. 
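# Illustrative usage sketch (not generated code): the PATCH-style models above,
# which carry only the mutable fields of a tracked resource. PartialSku's
# remaining fields are documented just below; all values here are illustrative
# assumptions.
from azure.mgmt.machinelearningservices import models as ml_models

scale_update = ml_models.PartialMinimalTrackedResourceWithSku(
    tags={"cost-center": "1234"},
    sku=ml_models.PartialSku(name="Default", capacity=3),  # e.g. scale out to 3 instances
)

identity_update = ml_models.PartialMinimalTrackedResourceWithIdentity(
    identity=ml_models.PartialManagedServiceIdentity(type="SystemAssigned"),
)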
+ :vartype capacity: int + :ivar family: If the service has different generations of hardware, for the same SKU, then that + can be captured here. + :vartype family: str + :ivar name: The name of the SKU. Ex - P3. It is typically a letter+number code. + :vartype name: str + :ivar size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :vartype size: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'int'}, + 'family': {'key': 'family', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + *, + capacity: Optional[int] = None, + family: Optional[str] = None, + name: Optional[str] = None, + size: Optional[str] = None, + tier: Optional[Union[str, "_models.SkuTier"]] = None, + **kwargs + ): + """ + :keyword capacity: If the SKU supports scale out/in then the capacity integer should be + included. If scale out/in is not possible for the resource this may be omitted. + :paramtype capacity: int + :keyword family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :paramtype family: str + :keyword name: The name of the SKU. Ex - P3. It is typically a letter+number code. + :paramtype name: str + :keyword size: The SKU size. When the name field is the combination of tier and some other + value, this would be the standalone code. + :paramtype size: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + super(PartialSku, self).__init__(**kwargs) + self.capacity = capacity + self.family = family + self.name = name + self.size = size + self.tier = tier + + +class Password(msrest.serialization.Model): + """Password. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(Password, self).__init__(**kwargs) + self.name = None + self.value = None + + +class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """PATAuthTypeWorkspaceConnectionProperties. + + All required parameters must be populated in order to send to Azure. + + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". 
+ :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken + """ + + _validation = { + 'auth_type': {'required': True}, + } + + _attribute_map = { + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionPersonalAccessToken'}, + } + + def __init__( + self, + *, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionPersonalAccessToken"] = None, + **kwargs + ): + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken + """ + super(PATAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = 'PAT' # type: str + self.credentials = credentials + + +class PersonalComputeInstanceSettings(msrest.serialization.Model): + """Settings for a personal compute instance. + + :ivar assigned_user: A user explicitly assigned to a personal compute instance. + :vartype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + + _attribute_map = { + 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, + } + + def __init__( + self, + *, + assigned_user: Optional["_models.AssignedUser"] = None, + **kwargs + ): + """ + :keyword assigned_user: A user explicitly assigned to a personal compute instance. + :paramtype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + super(PersonalComputeInstanceSettings, self).__init__(**kwargs) + self.assigned_user = assigned_user + + +class PipelineJob(JobBaseProperties): + """Pipeline Job definition: defines generic to MFE attributes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar inputs: Inputs for the pipeline job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar jobs: Jobs construct the Pipeline Job. + :vartype jobs: dict[str, any] + :ivar outputs: Outputs for the pipeline job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. + :vartype settings: any + :ivar source_job_id: ARM resource ID of source job. 
+ :vartype source_job_id: str + """ + + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, + 'jobs': {'key': 'jobs', 'type': '{object}'}, + 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + 'settings': {'key': 'settings', 'type': 'object'}, + 'source_job_id': {'key': 'sourceJobId', 'type': 'str'}, + } + + def __init__( + self, + *, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jobs: Optional[Dict[str, Any]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + settings: Optional[Any] = None, + source_job_id: Optional[str] = None, **kwargs ): - super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) - self.principal_id = None - self.client_id = None + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword inputs: Inputs for the pipeline job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword jobs: Jobs construct the Pipeline Job. + :paramtype jobs: dict[str, any] + :keyword outputs: Outputs for the pipeline job. 
+ :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. + :paramtype settings: any + :keyword source_job_id: ARM resource ID of source job. + :paramtype source_job_id: str + """ + super(PipelineJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'Pipeline' # type: str + self.inputs = inputs + self.jobs = jobs + self.outputs = outputs + self.settings = settings + self.source_job_id = source_job_id -class ComputeInstance(Compute): - """An Azure Machine Learning compute instance. +class PrivateEndpoint(msrest.serialization.Model): + """The Private Endpoint resource. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to. + :vartype subnet_arm_id: str + """ - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + _validation = { + 'id': {'readonly': True}, + 'subnet_arm_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + self.subnet_arm_id = None + + +class PrivateEndpointConnection(Resource): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar private_endpoint: The resource of private end point. + :vartype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :ivar private_link_service_connection_state: A collection of information about the state of the + connection between service consumer and provider. + :vartype private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Known values are: "Succeeded", "Creating", "Deleting", "Failed". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: Compute Instance properties. - :type properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState """ _validation = { - 'compute_type': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + 
def __init__( + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + private_endpoint: Optional["_models.PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword private_endpoint: The resource of private end point. + :paramtype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :keyword private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :paramtype private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + """ + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateEndpointConnectionListResult(msrest.serialization.Model): + """List of private endpoint connection associated with the specified workspace. + + :ivar value: Array of private endpoint connections. + :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.PrivateEndpointConnection"]] = None, + **kwargs + ): + """ + :keyword value: Array of private endpoint connections. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + """ + super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :ivar required_zone_names: The private link resource Private link DNS zone name. + :vartype required_zone_names: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["_models.Sku"] = None, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword required_zone_names: The private link resource Private link DNS zone name. + :paramtype required_zone_names: list[str] + """ + super(PrivateLinkResource, self).__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.group_id = None + self.required_members = None + self.required_zone_names = required_zone_names + + +class PrivateLinkResourceListResult(msrest.serialization.Model): + """A list of private link resources. + + :ivar value: Array of private link resources. + :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + *, + value: Optional[List["_models.PrivateLinkResource"]] = None, + **kwargs + ): + """ + :keyword value: Array of private link resources. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. + + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", "Timeout". 
+ :vartype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + **kwargs + ): + """ + :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the + owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". + :paramtype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :keyword description: The reason for approval/rejection of the connection. + :paramtype description: str + :keyword actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :paramtype actions_required: str + """ + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class ProbeSettings(msrest.serialization.Model): + """Deployment container liveness/readiness probe configuration. + + :ivar failure_threshold: The number of failures to allow before returning an unhealthy status. + :vartype failure_threshold: int + :ivar initial_delay: The delay before the first probe in ISO 8601 format. + :vartype initial_delay: ~datetime.timedelta + :ivar period: The length of time between probes in ISO 8601 format. + :vartype period: ~datetime.timedelta + :ivar success_threshold: The number of successful probes before returning a healthy status. + :vartype success_threshold: int + :ivar timeout: The probe timeout in ISO 8601 format. + :vartype timeout: ~datetime.timedelta + """ + + _attribute_map = { + 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, + 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, + 'period': {'key': 'period', 'type': 'duration'}, + 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__( + self, + *, + failure_threshold: Optional[int] = 30, + initial_delay: Optional[datetime.timedelta] = None, + period: Optional[datetime.timedelta] = "PT10S", + success_threshold: Optional[int] = 1, + timeout: Optional[datetime.timedelta] = "PT2S", + **kwargs + ): + """ + :keyword failure_threshold: The number of failures to allow before returning an unhealthy + status. + :paramtype failure_threshold: int + :keyword initial_delay: The delay before the first probe in ISO 8601 format. + :paramtype initial_delay: ~datetime.timedelta + :keyword period: The length of time between probes in ISO 8601 format. + :paramtype period: ~datetime.timedelta + :keyword success_threshold: The number of successful probes before returning a healthy status. + :paramtype success_threshold: int + :keyword timeout: The probe timeout in ISO 8601 format. 
+ :paramtype timeout: ~datetime.timedelta + """ + super(ProbeSettings, self).__init__(**kwargs) + self.failure_threshold = failure_threshold + self.initial_delay = initial_delay + self.period = period + self.success_threshold = success_threshold + self.timeout = timeout + + +class ProgressMetrics(msrest.serialization.Model): + """Progress metrics definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar completed_datapoint_count: The completed datapoint count. + :vartype completed_datapoint_count: long + :ivar incremental_data_last_refresh_date_time: The time of last successful incremental data + refresh in UTC. + :vartype incremental_data_last_refresh_date_time: ~datetime.datetime + :ivar skipped_datapoint_count: The skipped datapoint count. + :vartype skipped_datapoint_count: long + :ivar total_datapoint_count: The total datapoint count. + :vartype total_datapoint_count: long + """ + + _validation = { + 'completed_datapoint_count': {'readonly': True}, + 'incremental_data_last_refresh_date_time': {'readonly': True}, + 'skipped_datapoint_count': {'readonly': True}, + 'total_datapoint_count': {'readonly': True}, + } + + _attribute_map = { + 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'}, + 'incremental_data_last_refresh_date_time': {'key': 'incrementalDataLastRefreshDateTime', 'type': 'iso-8601'}, + 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'}, + 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(ProgressMetrics, self).__init__(**kwargs) + self.completed_datapoint_count = None + self.incremental_data_last_refresh_date_time = None + self.skipped_datapoint_count = None + self.total_datapoint_count = None + + +class PyTorch(DistributionConfiguration): + """PyTorch distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar distribution_type: Required. [Required] Specifies the type of distribution + framework.Constant filled by server. Known values are: "PyTorch", "TensorFlow", "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar process_count_per_instance: Number of processes per node. + :vartype process_count_per_instance: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + } + + def __init__( + self, + *, + process_count_per_instance: Optional[int] = None, + **kwargs + ): + """ + :keyword process_count_per_instance: Number of processes per node. + :paramtype process_count_per_instance: int + """ + super(PyTorch, self).__init__(**kwargs) + self.distribution_type = 'PyTorch' # type: str + self.process_count_per_instance = process_count_per_instance + + +class QuotaBaseProperties(msrest.serialization.Model): + """The properties for Quota update or retrieval. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. Known values are: "Count". 
+ :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["ComputeInstanceProperties"] = None, + id: Optional[str] = None, + type: Optional[str] = None, + limit: Optional[int] = None, + unit: Optional[Union[str, "_models.QuotaUnit"]] = None, **kwargs ): - super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'ComputeInstance' # type: str - self.properties = properties + """ + :keyword id: Specifies the resource ID. + :paramtype id: str + :keyword type: Specifies the resource type. + :paramtype type: str + :keyword limit: The maximum permitted quota of the resource. + :paramtype limit: long + :keyword unit: An enum describing the unit of quota measurement. Known values are: "Count". + :paramtype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + super(QuotaBaseProperties, self).__init__(**kwargs) + self.id = id + self.type = type + self.limit = limit + self.unit = unit -class ComputeInstanceApplication(msrest.serialization.Model): - """Defines an Aml Instance application and its connectivity endpoint URI. +class QuotaUpdateParameters(msrest.serialization.Model): + """Quota update parameters. - :param display_name: Name of the ComputeInstance application. - :type display_name: str - :param endpoint_uri: Application' endpoint URI. - :type endpoint_uri: str + :ivar value: The list for update quota. + :vartype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :ivar location: Region of workspace quota to be updated. + :vartype location: str """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__( self, *, - display_name: Optional[str] = None, - endpoint_uri: Optional[str] = None, + value: Optional[List["_models.QuotaBaseProperties"]] = None, + location: Optional[str] = None, **kwargs ): - super(ComputeInstanceApplication, self).__init__(**kwargs) - self.display_name = display_name - self.endpoint_uri = endpoint_uri + """ + :keyword value: The list for update quota. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :keyword location: Region of workspace quota to be updated. + :paramtype location: str + """ + super(QuotaUpdateParameters, self).__init__(**kwargs) + self.value = value + self.location = location -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): - """Defines all connectivity endpoints and properties for a ComputeInstance. +class RandomSamplingAlgorithm(SamplingAlgorithm): + """Defines a Sampling Algorithm that generates values randomly. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar public_ip_address: Public IP Address of this ComputeInstance. 
- :vartype public_ip_address: str - :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in - which the compute instance is deployed). - :vartype private_ip_address: str + :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating + hyperparameter values, along with configuration properties.Constant filled by server. Known + values are: "Grid", "Random", "Bayesian". + :vartype sampling_algorithm_type: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType + :ivar rule: The specific type of random algorithm. Known values are: "Random", "Sobol". + :vartype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule + :ivar seed: An optional integer to use as the seed for random number generation. + :vartype seed: int """ _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, + 'sampling_algorithm_type': {'required': True}, } _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + 'rule': {'key': 'rule', 'type': 'str'}, + 'seed': {'key': 'seed', 'type': 'int'}, } def __init__( self, + *, + rule: Optional[Union[str, "_models.RandomSamplingAlgorithmRule"]] = None, + seed: Optional[int] = None, **kwargs ): - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) - self.public_ip_address = None - self.private_ip_address = None + """ + :keyword rule: The specific type of random algorithm. Known values are: "Random", "Sobol". + :paramtype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule + :keyword seed: An optional integer to use as the seed for random number generation. + :paramtype seed: int + """ + super(RandomSamplingAlgorithm, self).__init__(**kwargs) + self.sampling_algorithm_type = 'Random' # type: str + self.rule = rule + self.seed = seed -class ComputeInstanceCreatedBy(msrest.serialization.Model): - """Describes information on user who created this ComputeInstance. +class RecurrenceSchedule(msrest.serialization.Model): + """RecurrenceSchedule. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar user_name: Name of the user. - :vartype user_name: str - :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. - :vartype user_org_id: str - :ivar user_id: Uniquely identifies the user within his/her organization. - :vartype user_id: str + :ivar hours: Required. [Required] List of hours for the schedule. + :vartype hours: list[int] + :ivar minutes: Required. [Required] List of minutes for the schedule. + :vartype minutes: list[int] + :ivar week_days: List of days for the schedule. 
+ :vartype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay]
 """
 
 _validation = {
- 'user_name': {'readonly': True},
- 'user_org_id': {'readonly': True},
- 'user_id': {'readonly': True},
+ 'hours': {'required': True},
+ 'minutes': {'required': True},
 }
 
 _attribute_map = {
- 'user_name': {'key': 'userName', 'type': 'str'},
- 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
- 'user_id': {'key': 'userId', 'type': 'str'},
+ 'hours': {'key': 'hours', 'type': '[int]'},
+ 'minutes': {'key': 'minutes', 'type': '[int]'},
+ 'week_days': {'key': 'weekDays', 'type': '[str]'},
 }
 
 def __init__(
 self,
+ *,
+ hours: List[int],
+ minutes: List[int],
+ week_days: Optional[List[Union[str, "_models.WeekDay"]]] = None,
 **kwargs
 ):
- super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
- self.user_name = None
- self.user_org_id = None
- self.user_id = None
+ """
+ :keyword hours: Required. [Required] List of hours for the schedule.
+ :paramtype hours: list[int]
+ :keyword minutes: Required. [Required] List of minutes for the schedule.
+ :paramtype minutes: list[int]
+ :keyword week_days: List of days for the schedule.
+ :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay]
+ """
+ super(RecurrenceSchedule, self).__init__(**kwargs)
+ self.hours = hours
+ self.minutes = minutes
+ self.week_days = week_days
+
+
+class RecurrenceTrigger(TriggerBase):
+ """RecurrenceTrigger.
 
+ All required parameters must be populated in order to send to Azure.
 
-class ComputeInstanceLastOperation(msrest.serialization.Model):
- """The last operation on ComputeInstance.
-
- :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
- "Stop", "Restart", "Reimage", "Delete".
- :type operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName
- :param operation_time: Time of the last operation.
- :type operation_time: ~datetime.datetime
- :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
- "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
- :type operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus
+ :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer to
+ https://en.wikipedia.org/wiki/ISO_8601.
+ Recommended format would be "2022-06-01T00:00:01"
+ If not present, the schedule will run indefinitely.
+ :vartype end_time: str
+ :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC
+ offset.
+ :vartype start_time: str
+ :ivar time_zone: Specifies time zone in which the schedule runs.
+ TimeZone should follow Windows time zone format. Refer:
+ https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11.
+ :vartype time_zone: str
+ :ivar trigger_type: Required. [Required].Constant filled by server. Known values are:
+ "Recurrence", "Cron".
+ :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType
+ :ivar frequency: Required. [Required] The frequency to trigger schedule. Known values are:
+ "Minute", "Hour", "Day", "Week", "Month".
+ :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency
+ :ivar interval: Required. [Required] Specifies schedule interval in conjunction with frequency.
+ :vartype interval: int
+ :ivar schedule: Required. [Required] The recurrence schedule. 
+ :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule
 """
 
+ _validation = {
+ 'trigger_type': {'required': True},
+ 'frequency': {'required': True},
+ 'interval': {'required': True},
+ 'schedule': {'required': True},
+ }
+
 _attribute_map = {
- 'operation_name': {'key': 'operationName', 'type': 'str'},
- 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
- 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ 'end_time': {'key': 'endTime', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'str'},
+ 'time_zone': {'key': 'timeZone', 'type': 'str'},
+ 'trigger_type': {'key': 'triggerType', 'type': 'str'},
+ 'frequency': {'key': 'frequency', 'type': 'str'},
+ 'interval': {'key': 'interval', 'type': 'int'},
+ 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'},
 }
 
 def __init__(
 self,
 *,
- operation_name: Optional[Union[str, "OperationName"]] = None,
- operation_time: Optional[datetime.datetime] = None,
- operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ frequency: Union[str, "_models.RecurrenceFrequency"],
+ interval: int,
+ schedule: "_models.RecurrenceSchedule",
+ end_time: Optional[str] = None,
+ start_time: Optional[str] = None,
+ time_zone: Optional[str] = "UTC",
 **kwargs
 ):
- super(ComputeInstanceLastOperation, self).__init__(**kwargs)
- self.operation_name = operation_name
- self.operation_time = operation_time
- self.operation_status = operation_status
-
-
-class ComputeInstanceProperties(msrest.serialization.Model):
- """Compute Instance properties.
+ """
+ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer to
+ https://en.wikipedia.org/wiki/ISO_8601.
+ Recommended format would be "2022-06-01T00:00:01"
+ If not present, the schedule will run indefinitely.
+ :paramtype end_time: str
+ :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC
+ offset.
+ :paramtype start_time: str
+ :keyword time_zone: Specifies time zone in which the schedule runs.
+ TimeZone should follow Windows time zone format. Refer:
+ https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11.
+ :paramtype time_zone: str
+ :keyword frequency: Required. [Required] The frequency to trigger schedule. Known values are:
+ "Minute", "Hour", "Day", "Week", "Month".
+ :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency
+ :keyword interval: Required. [Required] Specifies schedule interval in conjunction with
+ frequency.
+ :paramtype interval: int
+ :keyword schedule: Required. [Required] The recurrence schedule.
+ :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule
+ """
+ super(RecurrenceTrigger, self).__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs)
+ self.trigger_type = 'Recurrence' # type: str
+ self.frequency = frequency
+ self.interval = interval
+ self.schedule = schedule
+
+
+class RegenerateEndpointKeysRequest(msrest.serialization.Model):
+ """RegenerateEndpointKeysRequest.
 
- Variables are only populated by the server, and will be ignored when sending a request.
+ All required parameters must be populated in order to send to Azure.
 
- :param vm_size: Virtual Machine Size.
- :type vm_size: str
- :param subnet: Virtual network subnet resource ID the compute nodes belong to. 
- :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :param application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :type application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :param ssh_settings: Specifies policy and settings for SSH access. - :type ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings - :ivar connectivity_endpoints: Describes all connectivity endpoints available for this - ComputeInstance. - :vartype connectivity_endpoints: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints - :ivar applications: Describes available applications and their endpoints on this - ComputeInstance. - :vartype applications: - list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] - :ivar created_by: Describes information on user who created this ComputeInstance. - :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy - :ivar errors: Collection of errors encountered on this ComputeInstance. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed", - "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable". - :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState - :ivar last_operation: The last operation on ComputeInstance. - :vartype last_operation: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation + :ivar key_type: Required. [Required] Specification for which type of key to generate. Primary + or Secondary. Known values are: "Primary", "Secondary". + :vartype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType + :ivar key_value: The value the key is set to. 
+ :vartype key_value: str """ _validation = { - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, + 'key_type': {'required': True}, } _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, + 'key_type': {'key': 'keyType', 'type': 'str'}, + 'key_value': {'key': 'keyValue', 'type': 'str'}, } def __init__( self, *, - vm_size: Optional[str] = None, - subnet: Optional["ResourceId"] = None, - application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared", - ssh_settings: Optional["ComputeInstanceSshSettings"] = None, + key_type: Union[str, "_models.KeyType"], + key_value: Optional[str] = None, **kwargs ): - super(ComputeInstanceProperties, self).__init__(**kwargs) - self.vm_size = vm_size - self.subnet = subnet - self.application_sharing_policy = application_sharing_policy - self.ssh_settings = ssh_settings - self.connectivity_endpoints = None - self.applications = None - self.created_by = None - self.errors = None - self.state = None - self.last_operation = None + """ + :keyword key_type: Required. [Required] Specification for which type of key to generate. + Primary or Secondary. Known values are: "Primary", "Secondary". + :paramtype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType + :keyword key_value: The value the key is set to. + :paramtype key_value: str + """ + super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) + self.key_type = key_type + self.key_value = key_value -class ComputeInstanceSshSettings(msrest.serialization.Model): - """Specifies policy and settings for SSH access. +class RegistryListCredentialsResult(msrest.serialization.Model): + """RegistryListCredentialsResult. Variables are only populated by the server, and will be ignored when sending a request. - :param ssh_public_access: State of the public SSH port. Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :type ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :ivar admin_user_name: Describes the admin user name. - :vartype admin_user_name: str - :ivar ssh_port: Describes the port for connecting through SSH. - :vartype ssh_port: int - :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t - rsa -b 2048" to generate your SSH key pairs. 
- :type admin_public_key: str + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :ivar passwords: + :vartype passwords: list[~azure.mgmt.machinelearningservices.models.Password] """ _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, + 'location': {'readonly': True}, + 'username': {'readonly': True}, } _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'str'}, + 'passwords': {'key': 'passwords', 'type': '[Password]'}, } def __init__( self, *, - ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled", - admin_public_key: Optional[str] = None, + passwords: Optional[List["_models.Password"]] = None, **kwargs ): - super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = ssh_public_access - self.admin_user_name = None - self.ssh_port = None - self.admin_public_key = admin_public_key + """ + :keyword passwords: + :paramtype passwords: list[~azure.mgmt.machinelearningservices.models.Password] + """ + super(RegistryListCredentialsResult, self).__init__(**kwargs) + self.location = None + self.username = None + self.passwords = passwords -class Resource(msrest.serialization.Model): - """Azure Resource Manager resource envelope. +class Regression(AutoMLVertical, TableVertical): + """Regression task in AutoML Table vertical. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar cv_split_column_names: Columns to use for CVSplit data. + :vartype cv_split_column_names: list[str] + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset + when validation dataset is not provided. + :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :ivar test_data: Test data input. + :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype test_data_size: float + :ivar validation_data: Validation data inputs. 
+ :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :vartype validation_data_size: float + :ivar weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :vartype weight_column_name: str + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + "NormalizedMeanAbsoluteError". + :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics + :ivar training_settings: Inputs for training phase for an AutoML Job. 
+ :vartype training_settings: + ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, + 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, + 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, + 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + 'training_settings': {'key': 'trainingSettings', 'type': 'RegressionTrainingSettings'}, } def __init__( self, *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, + training_data: "_models.MLTableJobInput", + cv_split_column_names: Optional[List[str]] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, + test_data_size: Optional[float] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + weight_column_name: Optional[str] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.RegressionPrimaryMetrics"]] = None, + training_settings: Optional["_models.RegressionTrainingSettings"] = None, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = identity - self.location = location - self.type = None - self.tags = tags - self.sku = sku - - -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. 
- :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param properties: Compute properties. - :type properties: ~azure.mgmt.machinelearningservices.models.Compute + """ + :keyword cv_split_column_names: Columns to use for CVSplit data. + :paramtype cv_split_column_names: list[str] + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: + ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings + :keyword n_cross_validations: Number of cross validation folds to be applied on training + dataset + when validation dataset is not provided. + :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations + :keyword test_data: Test data input. + :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation + purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype test_data_size: float + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword validation_data_size: The fraction of training dataset that needs to be set aside for + validation purpose. + Values between (0.0 , 1.0) + Applied when validation dataset is not provided. + :paramtype validation_data_size: float + :keyword weight_column_name: The name of the sample weight column. Automated ML supports a + weighted column as an input, causing rows in the data to be weighted up or down. + :paramtype weight_column_name: str + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + "NormalizedMeanAbsoluteError". + :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics + :keyword training_settings: Inputs for training phase for an AutoML Job. 
+ :paramtype training_settings: + ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings + """ + super(Regression, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, limit_settings=limit_settings, n_cross_validations=n_cross_validations, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + self.cv_split_column_names = cv_split_column_names + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.n_cross_validations = n_cross_validations + self.test_data = test_data + self.test_data_size = test_data_size + self.validation_data = validation_data + self.validation_data_size = validation_data_size + self.weight_column_name = weight_column_name + self.task_type = 'Regression' # type: str + self.primary_metric = primary_metric + self.training_settings = training_settings + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class RegressionTrainingSettings(TrainingSettings): + """Regression Training related configuration. + + :ivar enable_dnn_training: Enable recommendation of DNN models. + :vartype enable_dnn_training: bool + :ivar enable_model_explainability: Flag to turn on explainability on best model. + :vartype enable_model_explainability: bool + :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :vartype enable_onnx_compatible_models: bool + :ivar enable_stack_ensemble: Enable stack ensemble run. + :vartype enable_stack_ensemble: bool + :ivar enable_vote_ensemble: Enable voting ensemble run. + :vartype enable_vote_ensemble: bool + :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :vartype ensemble_model_download_timeout: ~datetime.timedelta + :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :vartype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :ivar allowed_training_algorithms: Allowed models for regression task. + :vartype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + :ivar blocked_training_algorithms: Blocked models for regression task. 
+ :vartype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] """ - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, + 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, + 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, + 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, + 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, + 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, + 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, + 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, + 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, + 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, } def __init__( self, *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - properties: Optional["Compute"] = None, + enable_dnn_training: Optional[bool] = False, + enable_model_explainability: Optional[bool] = True, + enable_onnx_compatible_models: Optional[bool] = False, + enable_stack_ensemble: Optional[bool] = True, + enable_vote_ensemble: Optional[bool] = True, + ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, **kwargs ): - super(ComputeResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) - self.properties = properties - + """ + :keyword enable_dnn_training: Enable recommendation of DNN models. + :paramtype enable_dnn_training: bool + :keyword enable_model_explainability: Flag to turn on explainability on best model. + :paramtype enable_model_explainability: bool + :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. + :paramtype enable_onnx_compatible_models: bool + :keyword enable_stack_ensemble: Enable stack ensemble run. + :paramtype enable_stack_ensemble: bool + :keyword enable_vote_ensemble: Enable voting ensemble run. + :paramtype enable_vote_ensemble: bool + :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model + generation, multiple fitted models from the previous child runs are downloaded. + Configure this parameter with a higher value than 300 secs, if more time is needed. + :paramtype ensemble_model_download_timeout: ~datetime.timedelta + :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. + :paramtype stack_ensemble_settings: + ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings + :keyword allowed_training_algorithms: Allowed models for regression task. 
+ :paramtype allowed_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + :keyword blocked_training_algorithms: Blocked models for regression task. + :paramtype blocked_training_algorithms: list[str or + ~azure.mgmt.machinelearningservices.models.RegressionModels] + """ + super(RegressionTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, **kwargs) + self.allowed_training_algorithms = allowed_training_algorithms + self.blocked_training_algorithms = blocked_training_algorithms -class Databricks(Compute): - """A DataFactory compute. - Variables are only populated by the server, and will be ignored when sending a request. +class ResourceId(msrest.serialization.Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. All required parameters must be populated in order to send to Azure. - - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties + + :ivar id: Required. The ID of the resource. 
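# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated module): configuring
# the Regression AutoML task together with the RegressionTrainingSettings
# model defined above. The MLTable URI and algorithm names are placeholders,
# and MLTableJobInput is assumed to accept a `uri` keyword as the other
# job-input models in this package do.
from azure.mgmt.machinelearningservices import models as _models

regression_task = _models.Regression(
    training_data=_models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/training-data/"  # placeholder
    ),
    target_column_name="price",
    primary_metric="NormalizedRootMeanSquaredError",
    validation_data_size=0.2,
    training_settings=_models.RegressionTrainingSettings(
        enable_model_explainability=True,
        # Assumed RegressionModels enum values; plain strings are also accepted.
        allowed_training_algorithms=["LightGBM", "ElasticNet"],
    ),
)
# --------------------------------------------------------------------------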
+ :vartype id: str """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + 'id': {'required': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + 'id': {'key': 'id', 'type': 'str'}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["DatabricksProperties"] = None, + id: str, **kwargs ): - super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'Databricks' # type: str - self.properties = properties + """ + :keyword id: Required. The ID of the resource. + :paramtype id: str + """ + super(ResourceId, self).__init__(**kwargs) + self.id = id -class DatabricksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on Databricks. +class ResourceName(msrest.serialization.Model): + """The Resource Name. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param databricks_access_token: access token for databricks account. - :type databricks_access_token: str + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str """ _validation = { - 'compute_type': {'required': True}, + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def __init__( self, - *, - databricks_access_token: Optional[str] = None, **kwargs ): - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.compute_type = 'Databricks' # type: str - self.databricks_access_token = databricks_access_token + """ + """ + super(ResourceName, self).__init__(**kwargs) + self.value = None + self.localized_value = None -class DatabricksProperties(msrest.serialization.Model): - """DatabricksProperties. +class ResourceQuota(msrest.serialization.Model): + """The quota assigned to a resource. - :param databricks_access_token: Databricks access token. 
- :type databricks_access_token: str + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. Known values are: "Count". + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit """ + _validation = { + 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'limit': {'readonly': True}, + 'unit': {'readonly': True}, + } + _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'ResourceName'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, } def __init__( self, - *, - databricks_access_token: Optional[str] = None, **kwargs ): - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = databricks_access_token - + """ + """ + super(ResourceQuota, self).__init__(**kwargs) + self.id = None + self.aml_workspace_location = None + self.type = None + self.name = None + self.limit = None + self.unit = None -class DataFactory(Compute): - """A DataFactory compute. - Variables are only populated by the server, and will be ignored when sending a request. +class Route(msrest.serialization.Model): + """Route. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. 
- :vartype is_attached_compute: bool + :ivar path: Required. [Required] The path for the route. + :vartype path: str + :ivar port: Required. [Required] The port for the route. + :vartype port: int """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'port': {'required': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, + path: str, + port: int, **kwargs ): - super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'DataFactory' # type: str - + """ + :keyword path: Required. [Required] The path for the route. + :paramtype path: str + :keyword port: Required. [Required] The port for the route. + :paramtype port: int + """ + super(Route, self).__init__(**kwargs) + self.path = path + self.port = port -class DataLakeAnalytics(Compute): - """A DataLakeAnalytics compute. - Variables are only populated by the server, and will be ignored when sending a request. +class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """SASAuthTypeWorkspaceConnectionProperties. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. 
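# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): the Route model above requires
# both fields, and `path` must satisfy the pattern shown in its _validation
# map; the values here are placeholders.
from azure.mgmt.machinelearningservices import models as _models

scoring_route = _models.Route(path="score", port=8080)
# --------------------------------------------------------------------------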
- :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". + :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + 'auth_type': {'required': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionSharedAccessSignature'}, } def __init__( self, *, - compute_location: Optional[str] = None, - description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["DataLakeAnalyticsProperties"] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionSharedAccessSignature"] = None, **kwargs ): - super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'DataLakeAnalytics' # type: str - self.properties = properties - + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". 
+ :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature + """ + super(SASAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = 'SAS' # type: str + self.credentials = credentials + + +class SasDatastoreCredentials(DatastoreCredentials): + """SAS datastore credentials configuration. -class DataLakeAnalyticsProperties(msrest.serialization.Model): - """DataLakeAnalyticsProperties. + All required parameters must be populated in order to send to Azure. - :param data_lake_store_account_name: DataLake Store Account Name. - :type data_lake_store_account_name: str + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar secrets: Required. [Required] Storage container secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets """ + _validation = { + 'credentials_type': {'required': True}, + 'secrets': {'required': True}, + } + _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, } def __init__( self, *, - data_lake_store_account_name: Optional[str] = None, + secrets: "_models.SasDatastoreSecrets", **kwargs ): - super(DataLakeAnalyticsProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = data_lake_store_account_name + """ + :keyword secrets: Required. [Required] Storage container secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets + """ + super(SasDatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = 'Sas' # type: str + self.secrets = secrets -class EncryptionProperty(msrest.serialization.Model): - """EncryptionProperty. +class SasDatastoreSecrets(DatastoreSecrets): + """Datastore SAS secrets. All required parameters must be populated in order to send to Azure. - :param status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". - :type status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :param key_vault_properties: Required. Customer Key vault properties. - :type key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar sas_token: Storage container SAS token. 
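# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): a SAS-authenticated workspace
# connection payload built from SASAuthTypeWorkspaceConnectionProperties
# above. WorkspaceConnectionSharedAccessSignature is assumed to expose a
# `sas` keyword (it is defined elsewhere in this models module); the target
# URL and token are placeholders.
from azure.mgmt.machinelearningservices import models as _models

sas_connection = _models.SASAuthTypeWorkspaceConnectionProperties(
    category="ContainerRegistry",
    target="https://myregistry.azurecr.io",
    credentials=_models.WorkspaceConnectionSharedAccessSignature(sas="<sas-token>"),
)
# --------------------------------------------------------------------------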
+ :vartype sas_token: str """ _validation = { - 'status': {'required': True}, - 'key_vault_properties': {'required': True}, + 'secrets_type': {'required': True}, } _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'str'}, } def __init__( self, *, - status: Union[str, "EncryptionStatus"], - key_vault_properties: "KeyVaultProperties", + sas_token: Optional[str] = None, **kwargs ): - super(EncryptionProperty, self).__init__(**kwargs) - self.status = status - self.key_vault_properties = key_vault_properties + """ + :keyword sas_token: Storage container SAS token. + :paramtype sas_token: str + """ + super(SasDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'Sas' # type: str + self.sas_token = sas_token -class ErrorDetail(msrest.serialization.Model): - """Error detail information. +class ScaleSettings(msrest.serialization.Model): + """scale settings for AML Compute. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str + :ivar max_node_count: Required. Max number of nodes to use. + :vartype max_node_count: int + :ivar min_node_count: Min number of nodes to use. + :vartype min_node_count: int + :ivar node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This + string needs to be in the RFC Format. + :vartype node_idle_time_before_scale_down: ~datetime.timedelta """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'max_node_count': {'required': True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, } def __init__( self, *, - code: str, - message: str, + max_node_count: int, + min_node_count: Optional[int] = 0, + node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, **kwargs ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = code - self.message = message - + """ + :keyword max_node_count: Required. Max number of nodes to use. + :paramtype max_node_count: int + :keyword min_node_count: Min number of nodes to use. + :paramtype min_node_count: int + :keyword node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This + string needs to be in the RFC Format. + :paramtype node_idle_time_before_scale_down: ~datetime.timedelta + """ + super(ScaleSettings, self).__init__(**kwargs) + self.max_node_count = max_node_count + self.min_node_count = min_node_count + self.node_idle_time_before_scale_down = node_idle_time_before_scale_down -class ErrorResponse(msrest.serialization.Model): - """Error response information. - Variables are only populated by the server, and will be ignored when sending a request. +class ScaleSettingsInformation(msrest.serialization.Model): + """Desired scale settings for the amlCompute. - :ivar code: Error code. - :vartype code: str - :ivar message: Error message. - :vartype message: str - :ivar details: An array of error detail objects. 
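# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): SAS credentials for a datastore,
# composed from the SasDatastoreCredentials and SasDatastoreSecrets models
# above; the token value is a placeholder.
from azure.mgmt.machinelearningservices import models as _models

sas_credentials = _models.SasDatastoreCredentials(
    secrets=_models.SasDatastoreSecrets(sas_token="<container-sas-token>"),
)
# --------------------------------------------------------------------------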
- :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar scale_settings: scale settings for AML Compute. + :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings """ - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'details': {'readonly': True}, - } - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, } def __init__( self, + *, + scale_settings: Optional["_models.ScaleSettings"] = None, **kwargs ): - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - self.details = None + """ + :keyword scale_settings: scale settings for AML Compute. + :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings + """ + super(ScaleSettingsInformation, self).__init__(**kwargs) + self.scale_settings = scale_settings -class EstimatedVMPrice(msrest.serialization.Model): - """The estimated price info for using a VM of a particular OS type, tier, etc. +class Schedule(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param retail_price: Required. The price charged for using the VM. - :type retail_price: float - :param os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :type os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :param vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. [Required] Additional attributes of the entity. 
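# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): desired autoscale bounds for an
# AmlCompute cluster, wrapped in ScaleSettingsInformation as defined above.
import datetime

from azure.mgmt.machinelearningservices import models as _models

scale_update = _models.ScaleSettingsInformation(
    scale_settings=_models.ScaleSettings(
        min_node_count=0,
        max_node_count=4,
        node_idle_time_before_scale_down=datetime.timedelta(minutes=30),
    ),
)
# --------------------------------------------------------------------------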
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties """ _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'ScheduleProperties'}, } def __init__( self, *, - retail_price: float, - os_type: Union[str, "VMPriceOSType"], - vm_tier: Union[str, "VMTier"], + properties: "_models.ScheduleProperties", **kwargs ): - super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = retail_price - self.os_type = os_type - self.vm_tier = vm_tier - + """ + :keyword properties: Required. [Required] Additional attributes of the entity. + :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties + """ + super(Schedule, self).__init__(**kwargs) + self.properties = properties -class EstimatedVMPrices(msrest.serialization.Model): - """The estimated price info for using a VM. - All required parameters must be populated in order to send to Azure. +class ScheduleBase(msrest.serialization.Model): + """ScheduleBase. - :param billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". - :type billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :param unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :type unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :param values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. - :type values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + :ivar id: + :vartype id: str + :ivar provisioning_status: Known values are: "Completed", "Provisioning", "Failed". + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState + :ivar status: Known values are: "Enabled", "Disabled". 
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus """ - _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, - } - _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + 'id': {'key': 'id', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, *, - billing_currency: Union[str, "BillingCurrency"], - unit_of_measure: Union[str, "UnitOfMeasure"], - values: List["EstimatedVMPrice"], + id: Optional[str] = None, + provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningState"]] = None, + status: Optional[Union[str, "_models.ScheduleStatus"]] = None, **kwargs ): - super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = billing_currency - self.unit_of_measure = unit_of_measure - self.values = values + """ + :keyword id: + :paramtype id: str + :keyword provisioning_status: Known values are: "Completed", "Provisioning", "Failed". + :paramtype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState + :keyword status: Known values are: "Enabled", "Disabled". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus + """ + super(ScheduleBase, self).__init__(**kwargs) + self.id = id + self.provisioning_status = provisioning_status + self.status = status -class HDInsight(Compute): - """A HDInsight compute. +class ScheduleProperties(ResourceBase): + """Base definition of a schedule. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar action: Required. [Required] Specifies the action of the schedule. + :vartype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase + :ivar display_name: Display name of schedule. + :vartype display_name: str + :ivar is_enabled: Is the schedule enabled?. + :vartype is_enabled: bool + :ivar provisioning_state: Provisioning state for the schedule. Known values are: "Creating", + "Updating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. 
- :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties + ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningStatus + :ivar trigger: Required. [Required] Specifies the trigger details. + :vartype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase """ _validation = { - 'compute_type': {'required': True}, + 'action': {'required': True}, 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + 'trigger': {'required': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'action': {'key': 'action', 'type': 'ScheduleActionBase'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'trigger': {'key': 'trigger', 'type': 'TriggerBase'}, } def __init__( self, *, - compute_location: Optional[str] = None, + action: "_models.ScheduleActionBase", + trigger: "_models.TriggerBase", description: Optional[str] = None, - resource_id: Optional[str] = None, - properties: Optional["HDInsightProperties"] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + display_name: Optional[str] = None, + is_enabled: Optional[bool] = True, **kwargs ): - super(HDInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'HDInsight' # type: str - self.properties = properties + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword action: Required. [Required] Specifies the action of the schedule. 
+ :paramtype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase + :keyword display_name: Display name of schedule. + :paramtype display_name: str + :keyword is_enabled: Is the schedule enabled?. + :paramtype is_enabled: bool + :keyword trigger: Required. [Required] Specifies the trigger details. + :paramtype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase + """ + super(ScheduleProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.action = action + self.display_name = display_name + self.is_enabled = is_enabled + self.provisioning_state = None + self.trigger = trigger -class HDInsightProperties(msrest.serialization.Model): - """HDInsightProperties. - - :param ssh_port: Port open for ssh connections on the master node of the cluster. - :type ssh_port: int - :param address: Public IP address of the master node of the cluster. - :type address: str - :param administrator_account: Admin credentials for master node of the cluster. - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials +class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of Schedule entities. + + :ivar next_link: The link to the next page of Schedule objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type Schedule. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Schedule] """ _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Schedule]'}, } def __init__( self, *, - ssh_port: Optional[int] = None, - address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + next_link: Optional[str] = None, + value: Optional[List["_models.Schedule"]] = None, **kwargs ): - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = ssh_port - self.address = address - self.administrator_account = administrator_account + """ + :keyword next_link: The link to the next page of Schedule objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type Schedule. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Schedule] + """ + super(ScheduleResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value -class Identity(msrest.serialization.Model): - """Identity for the resource. +class ScriptReference(msrest.serialization.Model): + """Script reference. - Variables are only populated by the server, and will be ignored when sending a request. + :ivar script_source: The storage source of the script: inline, workspace. + :vartype script_source: str + :ivar script_data: The location of scripts in the mounted volume. + :vartype script_data: str + :ivar script_arguments: Optional command line arguments passed to the script to run. + :vartype script_arguments: str + :ivar timeout: Optional time period passed to timeout command. + :vartype timeout: str + """ - All required parameters must be populated in order to send to Azure. 
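# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): wrapping ScheduleProperties in the
# Schedule ARM envelope defined above. CronTrigger is assumed to be one of the
# TriggerBase subclasses generated elsewhere in this module (taking a cron
# `expression`), and `my_action` stands in for a concrete ScheduleActionBase
# subclass instance (for example a job or endpoint invocation action).
from azure.mgmt.machinelearningservices import models as _models

nightly_schedule = _models.Schedule(
    properties=_models.ScheduleProperties(
        display_name="nightly-run",
        is_enabled=True,
        trigger=_models.CronTrigger(expression="0 2 * * *"),  # assumed subclass
        action=my_action,  # placeholder for a ScheduleActionBase subclass instance
    ),
)
# --------------------------------------------------------------------------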
+ _attribute_map = { + 'script_source': {'key': 'scriptSource', 'type': 'str'}, + 'script_data': {'key': 'scriptData', 'type': 'str'}, + 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'str'}, + } - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :param type: Required. The identity type. Possible values include: "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned", "None". - :type type: str or ~azure.mgmt.machinelearningservices.models.ResourceIdentityType - :param user_assigned_identities: The list of user identities associated with resource. The user - identity dictionary key references will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. - :type user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + def __init__( + self, + *, + script_source: Optional[str] = None, + script_data: Optional[str] = None, + script_arguments: Optional[str] = None, + timeout: Optional[str] = None, + **kwargs + ): + """ + :keyword script_source: The storage source of the script: inline, workspace. + :paramtype script_source: str + :keyword script_data: The location of scripts in the mounted volume. + :paramtype script_data: str + :keyword script_arguments: Optional command line arguments passed to the script to run. + :paramtype script_arguments: str + :keyword timeout: Optional time period passed to timeout command. + :paramtype timeout: str + """ + super(ScriptReference, self).__init__(**kwargs) + self.script_source = script_source + self.script_data = script_data + self.script_arguments = script_arguments + self.timeout = timeout + + +class ScriptsToExecute(msrest.serialization.Model): + """Customized setup scripts. + + :ivar startup_script: Script that's run every time the machine starts. + :vartype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + :ivar creation_script: Script that's run only once during provision of the compute. + :vartype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference """ - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + _attribute_map = { + 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, + 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, } + def __init__( + self, + *, + startup_script: Optional["_models.ScriptReference"] = None, + creation_script: Optional["_models.ScriptReference"] = None, + **kwargs + ): + """ + :keyword startup_script: Script that's run every time the machine starts. + :paramtype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + :keyword creation_script: Script that's run only once during provision of the compute. + :paramtype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference + """ + super(ScriptsToExecute, self).__init__(**kwargs) + self.startup_script = startup_script + self.creation_script = creation_script + + +class ServiceManagedResourcesSettings(msrest.serialization.Model): + """ServiceManagedResourcesSettings. + + :ivar cosmos_db: The settings for the service managed cosmosdb account. 
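# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): provisioning and startup scripts
# described with the ScriptReference and ScriptsToExecute models above; the
# script paths, arguments, and timeout are placeholders.
from azure.mgmt.machinelearningservices import models as _models

scripts_to_execute = _models.ScriptsToExecute(
    creation_script=_models.ScriptReference(
        script_source="workspace",
        script_data="scripts/install_deps.sh",
        script_arguments="--quiet",
        timeout="5m",
    ),
    startup_script=_models.ScriptReference(
        script_source="workspace",
        script_data="scripts/start_services.sh",
    ),
)
# --------------------------------------------------------------------------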
+ :vartype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, } def __init__( self, *, - type: Union[str, "ResourceIdentityType"], - user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + cosmos_db: Optional["_models.CosmosDbSettings"] = None, **kwargs ): - super(Identity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = type - self.user_assigned_identities = user_assigned_identities + """ + :keyword cosmos_db: The settings for the service managed cosmosdb account. + :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + super(ServiceManagedResourcesSettings, self).__init__(**kwargs) + self.cosmos_db = cosmos_db -class KeyVaultProperties(msrest.serialization.Model): - """KeyVaultProperties. +class ServicePrincipalDatastoreCredentials(DatastoreCredentials): + """Service Principal datastore credentials configuration. All required parameters must be populated in order to send to Azure. - :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned - encryption key is present. - :type key_vault_arm_id: str - :param key_identifier: Required. Key vault uri to access the encryption key. - :type key_identifier: str - :param identity_client_id: For future use - The client id of the identity which will be used to - access key vault. - :type identity_client_id: str + :ivar credentials_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "None", + "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar authority_url: Authority URL used for authentication. + :vartype authority_url: str + :ivar client_id: Required. [Required] Service principal client ID. + :vartype client_id: str + :ivar resource_url: Resource the service principal has access to. + :vartype resource_url: str + :ivar secrets: Required. [Required] Service principal secrets. + :vartype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. 
+ :vartype tenant_id: str """ _validation = { - 'key_vault_arm_id': {'required': True}, - 'key_identifier': {'required': True}, + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'secrets': {'required': True}, + 'tenant_id': {'required': True}, } _attribute_map = { - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__( self, *, - key_vault_arm_id: str, - key_identifier: str, - identity_client_id: Optional[str] = None, + client_id: str, + secrets: "_models.ServicePrincipalDatastoreSecrets", + tenant_id: str, + authority_url: Optional[str] = None, + resource_url: Optional[str] = None, **kwargs ): - super(KeyVaultProperties, self).__init__(**kwargs) - self.key_vault_arm_id = key_vault_arm_id - self.key_identifier = key_identifier - self.identity_client_id = identity_client_id + """ + :keyword authority_url: Authority URL used for authentication. + :paramtype authority_url: str + :keyword client_id: Required. [Required] Service principal client ID. + :paramtype client_id: str + :keyword resource_url: Resource the service principal has access to. + :paramtype resource_url: str + :keyword secrets: Required. [Required] Service principal secrets. + :paramtype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal + belongs. + :paramtype tenant_id: str + """ + super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = 'ServicePrincipal' # type: str + self.authority_url = authority_url + self.client_id = client_id + self.resource_url = resource_url + self.secrets = secrets + self.tenant_id = tenant_id -class ListAmlUserFeatureResult(msrest.serialization.Model): - """The List Aml user feature operation response. +class ServicePrincipalDatastoreSecrets(DatastoreSecrets): + """Datastore Service Principal secrets. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str + :ivar secrets_type: Required. [Required] Credential type used to authentication with + storage.Constant filled by server. Known values are: "AccountKey", "Certificate", "Sas", + "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType + :ivar client_secret: Service principal secret. 
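# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): service principal credentials for
# a datastore, using ServicePrincipalDatastoreCredentials above together with
# ServicePrincipalDatastoreSecrets defined immediately below; the GUIDs and
# secret are placeholders.
from azure.mgmt.machinelearningservices import models as _models

sp_credentials = _models.ServicePrincipalDatastoreCredentials(
    client_id="00000000-0000-0000-0000-000000000000",
    tenant_id="00000000-0000-0000-0000-000000000000",
    secrets=_models.ServicePrincipalDatastoreSecrets(client_secret="<client-secret>"),
)
# --------------------------------------------------------------------------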
+ :vartype client_secret: str """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + 'secrets_type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, } def __init__( self, + *, + client_secret: Optional[str] = None, **kwargs ): - super(ListAmlUserFeatureResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - + """ + :keyword client_secret: Service principal secret. + :paramtype client_secret: str + """ + super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'ServicePrincipal' # type: str + self.client_secret = client_secret -class ListUsagesResult(msrest.serialization.Model): - """The List Usages operation response. - Variables are only populated by the server, and will be ignored when sending a request. +class SetupScripts(msrest.serialization.Model): + """Details of customized scripts to execute for setting up the cluster. - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. - :vartype next_link: str + :ivar scripts: Customized setup scripts. + :vartype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute """ - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'}, } def __init__( self, + *, + scripts: Optional["_models.ScriptsToExecute"] = None, **kwargs ): - super(ListUsagesResult, self).__init__(**kwargs) - self.value = None - self.next_link = None + """ + :keyword scripts: Customized setup scripts. + :paramtype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute + """ + super(SetupScripts, self).__init__(**kwargs) + self.scripts = scripts -class ListWorkspaceKeysResult(msrest.serialization.Model): - """ListWorkspaceKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. +class SharedPrivateLinkResource(msrest.serialization.Model): + """SharedPrivateLinkResource. - :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :param notebook_access_keys: - :type notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.NotebookListCredentialsResult + :ivar name: Unique name of the private link. + :vartype name: str + :ivar private_link_resource_id: The resource id that private link links to. + :vartype private_link_resource_id: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar request_message: Request message. + :vartype request_message: str + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. 
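# --------------------------------------------------------------------------
# Illustrative sketch (not generated code): the SetupScripts wrapper above
# simply carries a ScriptsToExecute payload, mirroring the earlier scripts
# sketch; the script path is a placeholder.
from azure.mgmt.machinelearningservices import models as _models

setup_scripts = _models.SetupScripts(
    scripts=_models.ScriptsToExecute(
        creation_script=_models.ScriptReference(
            script_source="workspace",
            script_data="scripts/install_deps.sh",
        ),
    ),
)
# --------------------------------------------------------------------------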
Known values are: "Pending", "Approved", "Rejected", "Disconnected", "Timeout". + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus """ - _validation = { - 'user_storage_key': {'readonly': True}, - 'user_storage_resource_id': {'readonly': True}, - 'app_insights_instrumentation_key': {'readonly': True}, - 'container_registry_credentials': {'readonly': True}, - } - _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, + 'name': {'key': 'name', 'type': 'str'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, } def __init__( self, *, - notebook_access_keys: Optional["NotebookListCredentialsResult"] = None, + name: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + group_id: Optional[str] = None, + request_message: Optional[str] = None, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, **kwargs ): - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None - self.notebook_access_keys = notebook_access_keys + """ + :keyword name: Unique name of the private link. + :paramtype name: str + :keyword private_link_resource_id: The resource id that private link links to. + :paramtype private_link_resource_id: str + :keyword group_id: The private link resource group id. + :paramtype group_id: str + :keyword request_message: Request message. + :paramtype request_message: str + :keyword status: Indicates whether the connection has been Approved/Rejected/Removed by the + owner of the service. Known values are: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". + :paramtype status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + """ + super(SharedPrivateLinkResource, self).__init__(**kwargs) + self.name = name + self.private_link_resource_id = private_link_resource_id + self.group_id = group_id + self.request_message = request_message + self.status = status -class ListWorkspaceQuotas(msrest.serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. +class Sku(msrest.serialization.Model): + """The resource model definition representing SKU. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. - :vartype next_link: str + :ivar name: Required. The name of the SKU. Ex - P3. 
It is typically a letter+number code. + :vartype name: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + :ivar size: The SKU size. When the name field is the combination of tier and some other value, + this would be the standalone code. + :vartype size: str + :ivar family: If the service has different generations of hardware, for the same SKU, then that + can be captured here. + :vartype family: str + :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. + If scale out/in is not possible for the resource this may be omitted. + :vartype capacity: int """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + 'name': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'capacity': {'key': 'capacity', 'type': 'int'}, } def __init__( self, + *, + name: str, + tier: Optional[Union[str, "_models.SkuTier"]] = None, + size: Optional[str] = None, + family: Optional[str] = None, + capacity: Optional[int] = None, **kwargs ): - super(ListWorkspaceQuotas, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class MachineLearningServiceError(msrest.serialization.Model): - """Wrapper for error response to follow ARM guidelines. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error: The error response. - :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse + """ + :keyword name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. + :paramtype name: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + :keyword size: The SKU size. When the name field is the combination of tier and some other + value, this would be the standalone code. + :paramtype size: str + :keyword family: If the service has different generations of hardware, for the same SKU, then + that can be captured here. + :paramtype family: str + :keyword capacity: If the SKU supports scale out/in then the capacity integer should be + included. If scale out/in is not possible for the resource this may be omitted. + :paramtype capacity: int + """ + super(Sku, self).__init__(**kwargs) + self.name = name + self.tier = tier + self.size = size + self.family = family + self.capacity = capacity + + +class SkuCapacity(msrest.serialization.Model): + """SKU capacity information. + + :ivar default: Gets or sets the default capacity. + :vartype default: int + :ivar maximum: Gets or sets the maximum. + :vartype maximum: int + :ivar minimum: Gets or sets the minimum. + :vartype minimum: int + :ivar scale_type: Gets or sets the type of the scale. Known values are: "Automatic", "Manual", + "None". 
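# Illustrative sketch (editor's addition, not part of the generated code): only `name`
# is required on the Sku model above; `tier` accepts a SkuTier value or its string form.
from azure.mgmt.machinelearningservices import models as _models

sku = _models.Sku(name="Basic", tier="Basic")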
+ :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType """ - _validation = { - 'error': {'readonly': True}, - } - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorResponse'}, + 'default': {'key': 'default', 'type': 'int'}, + 'maximum': {'key': 'maximum', 'type': 'int'}, + 'minimum': {'key': 'minimum', 'type': 'int'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, } def __init__( self, + *, + default: Optional[int] = 0, + maximum: Optional[int] = 0, + minimum: Optional[int] = 0, + scale_type: Optional[Union[str, "_models.SkuScaleType"]] = None, **kwargs ): - super(MachineLearningServiceError, self).__init__(**kwargs) - self.error = None - - -class NodeStateCounts(msrest.serialization.Model): - """Counts of various compute node states on the amlCompute. + """ + :keyword default: Gets or sets the default capacity. + :paramtype default: int + :keyword maximum: Gets or sets the maximum. + :paramtype maximum: int + :keyword minimum: Gets or sets the minimum. + :paramtype minimum: int + :keyword scale_type: Gets or sets the type of the scale. Known values are: "Automatic", + "Manual", "None". + :paramtype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType + """ + super(SkuCapacity, self).__init__(**kwargs) + self.default = default + self.maximum = maximum + self.minimum = minimum + self.scale_type = scale_type + + +class SkuResource(msrest.serialization.Model): + """Fulfills ARM Contract requirement to list all available SKUS for a resource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int + :ivar capacity: Gets or sets the Sku Capacity. + :vartype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity + :ivar resource_type: The resource type name. + :vartype resource_type: str + :ivar sku: Gets or sets the Sku. 
+ :vartype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, + 'resource_type': {'readonly': True}, } - _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'SkuCapacity'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'sku': {'key': 'sku', 'type': 'SkuSetting'}, } def __init__( self, + *, + capacity: Optional["_models.SkuCapacity"] = None, + sku: Optional["_models.SkuSetting"] = None, **kwargs ): - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None + """ + :keyword capacity: Gets or sets the Sku Capacity. + :paramtype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity + :keyword sku: Gets or sets the Sku. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting + """ + super(SkuResource, self).__init__(**kwargs) + self.capacity = capacity + self.resource_type = None + self.sku = sku -class NotebookListCredentialsResult(msrest.serialization.Model): - """NotebookListCredentialsResult. +class SkuResourceArmPaginatedResult(msrest.serialization.Model): + """A paginated list of SkuResource entities. - :param primary_access_key: - :type primary_access_key: str - :param secondary_access_key: - :type secondary_access_key: str + :ivar next_link: The link to the next page of SkuResource objects. If null, there are no + additional pages. + :vartype next_link: str + :ivar value: An array of objects of type SkuResource. + :vartype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] """ _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[SkuResource]'}, } def __init__( self, *, - primary_access_key: Optional[str] = None, - secondary_access_key: Optional[str] = None, + next_link: Optional[str] = None, + value: Optional[List["_models.SkuResource"]] = None, **kwargs ): - super(NotebookListCredentialsResult, self).__init__(**kwargs) - self.primary_access_key = primary_access_key - self.secondary_access_key = secondary_access_key + """ + :keyword next_link: The link to the next page of SkuResource objects. If null, there are no + additional pages. + :paramtype next_link: str + :keyword value: An array of objects of type SkuResource. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] + """ + super(SkuResourceArmPaginatedResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value -class NotebookPreparationError(msrest.serialization.Model): - """NotebookPreparationError. 
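# Illustrative sketch (editor's addition, not part of the generated code): how the
# nested SKU models fit together when inspecting an item of a list-SKUs response.
# SkuSetting is defined just below; resource_type is read-only and remains None on
# locally constructed instances.
from azure.mgmt.machinelearningservices import models as _models

sku_resource = _models.SkuResource(
    capacity=_models.SkuCapacity(minimum=0, maximum=4, default=1, scale_type="Manual"),
    sku=_models.SkuSetting(name="Standard_DS3_v2", tier="Standard"),
)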
+class SkuSetting(msrest.serialization.Model): + """SkuSetting fulfills the need for stripped down SKU info in ARM contract. + + All required parameters must be populated in order to send to Azure. - :param error_message: - :type error_message: str - :param status_code: - :type status_code: int + :ivar name: Required. [Required] The name of the SKU. Ex - P3. It is typically a letter+number + code. + :vartype name: str + :ivar tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ + _validation = { + 'name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, } def __init__( self, *, - error_message: Optional[str] = None, - status_code: Optional[int] = None, + name: str, + tier: Optional[Union[str, "_models.SkuTier"]] = None, **kwargs ): - super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = error_message - self.status_code = status_code + """ + :keyword name: Required. [Required] The name of the SKU. Ex - P3. It is typically a + letter+number code. + :paramtype name: str + :keyword tier: This field is required to be implemented by the Resource Provider if the service + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", "Premium". + :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier + """ + super(SkuSetting, self).__init__(**kwargs) + self.name = name + self.tier = tier -class NotebookResourceInfo(msrest.serialization.Model): - """NotebookResourceInfo. +class SparkJob(JobBaseProperties): + """Spark job definition. - :param fqdn: - :type fqdn: str - :param resource_id: the data plane resourceId that used to initialize notebook component. - :type resource_id: str - :param notebook_preparation_error: The error that occurs when preparing notebook. - :type notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. 
[Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar archives: Archive files used in the job. + :vartype archives: list[str] + :ivar args: Arguments for the job. + :vartype args: str + :ivar code_id: Required. [Required] ARM resource ID of the code asset. + :vartype code_id: str + :ivar conf: Spark configured properties. + :vartype conf: dict[str, str] + :ivar entry: Required. [Required] The entry to execute on startup of the job. + :vartype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :ivar environment_id: The ARM resource ID of the Environment specification for the job. + :vartype environment_id: str + :ivar files: Files used in the job. + :vartype files: list[str] + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar jars: Jar files used in the job. + :vartype jars: list[str] + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar py_files: Python files used in the job. + :vartype py_files: list[str] + :ivar resources: Compute Resource configuration for the job. 
+ :vartype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration """ + _validation = { + 'job_type': {'required': True}, + 'status': {'readonly': True}, + 'code_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'entry': {'required': True}, + } + _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'args': {'key': 'args', 'type': 'str'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'conf': {'key': 'conf', 'type': '{str}'}, + 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + 'py_files': {'key': 'pyFiles', 'type': '[str]'}, + 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, } def __init__( self, *, - fqdn: Optional[str] = None, - resource_id: Optional[str] = None, - notebook_preparation_error: Optional["NotebookPreparationError"] = None, + code_id: str, + entry: "_models.SparkJobEntry", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + archives: Optional[List[str]] = None, + args: Optional[str] = None, + conf: Optional[Dict[str, str]] = None, + environment_id: Optional[str] = None, + files: Optional[List[str]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jars: Optional[List[str]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + py_files: Optional[List[str]] = None, + resources: Optional["_models.SparkResourceConfiguration"] = None, **kwargs ): - super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = fqdn - self.resource_id = resource_id - self.notebook_preparation_error = notebook_preparation_error + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. 
+ :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword archives: Archive files used in the job. + :paramtype archives: list[str] + :keyword args: Arguments for the job. + :paramtype args: str + :keyword code_id: Required. [Required] ARM resource ID of the code asset. + :paramtype code_id: str + :keyword conf: Spark configured properties. + :paramtype conf: dict[str, str] + :keyword entry: Required. [Required] The entry to execute on startup of the job. + :paramtype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry + :keyword environment_id: The ARM resource ID of the Environment specification for the job. + :paramtype environment_id: str + :keyword files: Files used in the job. + :paramtype files: list[str] + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword jars: Jar files used in the job. + :paramtype jars: list[str] + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword py_files: Python files used in the job. + :paramtype py_files: list[str] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration + """ + super(SparkJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'Spark' # type: str + self.archives = archives + self.args = args + self.code_id = code_id + self.conf = conf + self.entry = entry + self.environment_id = environment_id + self.files = files + self.inputs = inputs + self.jars = jars + self.outputs = outputs + self.py_files = py_files + self.resources = resources + + +class SparkJobEntry(msrest.serialization.Model): + """Spark job entry point definition. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SparkJobScalaEntry. -class Operation(msrest.serialization.Model): - """Azure Machine Learning workspace REST API operation. + All required parameters must be populated in order to send to Azure. - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: Display name of operation. - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled + by server. Known values are: "SparkJobPythonEntry", "SparkJobScalaEntry". 
+ :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType """ + _validation = { + 'spark_job_entry_type': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, + } + + _subtype_map = { + 'spark_job_entry_type': {'SparkJobPythonEntry': 'SparkJobScalaEntry'} } def __init__( self, - *, - name: Optional[str] = None, - display: Optional["OperationDisplay"] = None, **kwargs ): - super(Operation, self).__init__(**kwargs) - self.name = name - self.display = display + """ + """ + super(SparkJobEntry, self).__init__(**kwargs) + self.spark_job_entry_type = None # type: Optional[str] -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. +class SparkJobPythonEntry(SparkJobEntry): + """SparkJobPythonEntry. + + All required parameters must be populated in order to send to Azure. - :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. - :type description: str + :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled + by server. Known values are: "SparkJobPythonEntry", "SparkJobScalaEntry". + :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar file: Required. [Required] Relative python file path for job entry point. + :vartype file: str """ + _validation = { + 'spark_job_entry_type': {'required': True}, + 'file': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, } def __init__( self, *, - provider: Optional[str] = None, - resource: Optional[str] = None, - operation: Optional[str] = None, - description: Optional[str] = None, + file: str, **kwargs ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation - self.description = description + """ + :keyword file: Required. [Required] Relative python file path for job entry point. + :paramtype file: str + """ + super(SparkJobPythonEntry, self).__init__(**kwargs) + self.spark_job_entry_type = 'SparkJobPythonEntry' # type: str + self.file = file -class OperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. +class SparkJobScalaEntry(SparkJobEntry): + """SparkJobScalaEntry. + + All required parameters must be populated in order to send to Azure. - :param value: List of AML workspace operations supported by the AML workspace resource - provider. - :type value: list[~azure.mgmt.machinelearningservices.models.Operation] + :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled + by server. Known values are: "SparkJobPythonEntry", "SparkJobScalaEntry". 
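# Illustrative sketch (editor's addition, not part of the generated code): the Python
# entry point defined above; the Scala variant that follows takes a class_name instead.
from azure.mgmt.machinelearningservices import models as _models

python_entry = _models.SparkJobPythonEntry(file="src/main.py")  # placeholder relative path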
+ :vartype spark_job_entry_type: str or + ~azure.mgmt.machinelearningservices.models.SparkJobEntryType + :ivar class_name: Required. [Required] Scala class name used as entry point. + :vartype class_name: str """ + _validation = { + 'spark_job_entry_type': {'required': True}, + 'class_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, + 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, } def __init__( self, *, - value: Optional[List["Operation"]] = None, + class_name: str, **kwargs ): - super(OperationListResult, self).__init__(**kwargs) - self.value = value - - -class PaginatedComputeResourcesList(msrest.serialization.Model): - """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. - - :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. - :type value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str + """ + :keyword class_name: Required. [Required] Scala class name used as entry point. + :paramtype class_name: str + """ + super(SparkJobScalaEntry, self).__init__(**kwargs) + self.spark_job_entry_type = 'SparkJobPythonEntry' # type: str + self.class_name = class_name + + +class SparkResourceConfiguration(msrest.serialization.Model): + """SparkResourceConfiguration. + + :ivar instance_type: Optional type of VM used as supported by the compute target. + :vartype instance_type: str + :ivar runtime_version: Version of spark runtime used for the job. + :vartype runtime_version: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, } def __init__( self, *, - value: Optional[List["ComputeResource"]] = None, - next_link: Optional[str] = None, + instance_type: Optional[str] = None, + runtime_version: Optional[str] = "3.1", **kwargs ): - super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + """ + :keyword instance_type: Optional type of VM used as supported by the compute target. + :paramtype instance_type: str + :keyword runtime_version: Version of spark runtime used for the job. + :paramtype runtime_version: str + """ + super(SparkResourceConfiguration, self).__init__(**kwargs) + self.instance_type = instance_type + self.runtime_version = runtime_version -class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): - """Paginated list of Workspace connection objects. +class SslConfiguration(msrest.serialization.Model): + """The ssl configuration for scoring. - :param value: An array of Workspace connection objects. - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnection] - :param next_link: A continuation link (absolute URI) to the next page of results in the list. - :type next_link: str + :ivar status: Enable or disable ssl for scoring. Known values are: "Disabled", "Enabled", + "Auto". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus + :ivar cert: Cert data. + :vartype cert: str + :ivar key: Key data. + :vartype key: str + :ivar cname: CNAME of the cert. 
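# Illustrative sketch (editor's addition, not part of the generated code): a SparkJob
# combining the entry point and resource configuration models defined above. ARM IDs
# are placeholders; in a request this object is typically carried as the properties of
# a JobBase resource.
from azure.mgmt.machinelearningservices import models as _models

spark_job = _models.SparkJob(
    display_name="sample-spark-job",
    code_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
            "Microsoft.MachineLearningServices/workspaces/<ws>/codes/<code>/versions/1",
    entry=_models.SparkJobPythonEntry(file="src/main.py"),
    environment_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                   "Microsoft.MachineLearningServices/workspaces/<ws>/environments/<env>/versions/1",
    conf={"spark.driver.cores": "1", "spark.executor.instances": "2"},
    resources=_models.SparkResourceConfiguration(
        instance_type="Standard_E4S_V3",  # placeholder instance type
        runtime_version="3.1",
    ),
)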
+ :vartype cname: str + :ivar leaf_domain_label: Leaf domain label of public endpoint. + :vartype leaf_domain_label: str + :ivar overwrite_existing_domain: Indicates whether to overwrite existing domain label. + :vartype overwrite_existing_domain: bool """ _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'cert': {'key': 'cert', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + 'cname': {'key': 'cname', 'type': 'str'}, + 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'}, + 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'}, } def __init__( self, *, - value: Optional[List["WorkspaceConnection"]] = None, - next_link: Optional[str] = None, + status: Optional[Union[str, "_models.SslConfigStatus"]] = None, + cert: Optional[str] = None, + key: Optional[str] = None, + cname: Optional[str] = None, + leaf_domain_label: Optional[str] = None, + overwrite_existing_domain: Optional[bool] = None, **kwargs ): - super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link + """ + :keyword status: Enable or disable ssl for scoring. Known values are: "Disabled", "Enabled", + "Auto". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus + :keyword cert: Cert data. + :paramtype cert: str + :keyword key: Key data. + :paramtype key: str + :keyword cname: CNAME of the cert. + :paramtype cname: str + :keyword leaf_domain_label: Leaf domain label of public endpoint. + :paramtype leaf_domain_label: str + :keyword overwrite_existing_domain: Indicates whether to overwrite existing domain label. + :paramtype overwrite_existing_domain: bool + """ + super(SslConfiguration, self).__init__(**kwargs) + self.status = status + self.cert = cert + self.key = key + self.cname = cname + self.leaf_domain_label = leaf_domain_label + self.overwrite_existing_domain = overwrite_existing_domain + + +class StackEnsembleSettings(msrest.serialization.Model): + """Advances setting to customize StackEnsemble run. + + :ivar stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the + meta-learner. + :vartype stack_meta_learner_k_wargs: any + :ivar stack_meta_learner_train_percentage: Specifies the proportion of the training set (when + choosing train and validation type of training) to be reserved for training the meta-learner. + Default value is 0.2. + :vartype stack_meta_learner_train_percentage: float + :ivar stack_meta_learner_type: The meta-learner is a model trained on the output of the + individual heterogeneous models. Known values are: "None", "LogisticRegression", + "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", + "LightGBMRegressor", "LinearRegression". + :vartype stack_meta_learner_type: str or + ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType + """ + _attribute_map = { + 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, + 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, + 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, + } -class Password(msrest.serialization.Model): - """Password. 
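# Illustrative sketch (editor's addition, not part of the generated code): two ways to
# fill the SslConfiguration defined above: supply certificate material directly, or use
# a leaf domain label for the public endpoint. All values are placeholders.
from azure.mgmt.machinelearningservices import models as _models

byo_cert_ssl = _models.SslConfiguration(
    status="Enabled",
    cert="<PEM certificate data>",
    key="<PEM private key data>",
    cname="scoring.contoso.com",
)
leaf_label_ssl = _models.SslConfiguration(
    status="Auto",
    leaf_domain_label="contoso-scoring",
    overwrite_existing_domain=True,
)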
+ def __init__( + self, + *, + stack_meta_learner_k_wargs: Optional[Any] = None, + stack_meta_learner_train_percentage: Optional[float] = 0.2, + stack_meta_learner_type: Optional[Union[str, "_models.StackMetaLearnerType"]] = None, + **kwargs + ): + """ + :keyword stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the + meta-learner. + :paramtype stack_meta_learner_k_wargs: any + :keyword stack_meta_learner_train_percentage: Specifies the proportion of the training set + (when choosing train and validation type of training) to be reserved for training the + meta-learner. Default value is 0.2. + :paramtype stack_meta_learner_train_percentage: float + :keyword stack_meta_learner_type: The meta-learner is a model trained on the output of the + individual heterogeneous models. Known values are: "None", "LogisticRegression", + "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", + "LightGBMRegressor", "LinearRegression". + :paramtype stack_meta_learner_type: str or + ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType + """ + super(StackEnsembleSettings, self).__init__(**kwargs) + self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs + self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage + self.stack_meta_learner_type = stack_meta_learner_type + + +class StatusMessage(msrest.serialization.Model): + """Active message associated with project. Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: - :vartype name: str - :ivar value: - :vartype value: str + :ivar code: Service-defined message code. + :vartype code: str + :ivar created_date_time: Time in UTC at which the message was created. + :vartype created_date_time: ~datetime.datetime + :ivar level: Severity level of message. Known values are: "Error", "Information", "Warning". + :vartype level: str or ~azure.mgmt.machinelearningservices.models.StatusMessageLevel + :ivar message: A human-readable representation of the message code. + :vartype message: str """ _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, + 'code': {'readonly': True}, + 'created_date_time': {'readonly': True}, + 'level': {'readonly': True}, + 'message': {'readonly': True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, + 'level': {'key': 'level', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None + """ + """ + super(StatusMessage, self).__init__(**kwargs) + self.code = None + self.created_date_time = None + self.level = None + self.message = None -class PrivateEndpoint(msrest.serialization.Model): - """The Private Endpoint resource. +class SweepJob(JobBaseProperties): + """Sweep job definition. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
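# Illustrative sketch (editor's addition, not part of the generated code): the
# StackEnsembleSettings defined above with an explicit meta-learner; extra initializer
# arguments for the meta-learner are passed through stack_meta_learner_k_wargs.
from azure.mgmt.machinelearningservices import models as _models

stack_settings = _models.StackEnsembleSettings(
    stack_meta_learner_type="LogisticRegression",
    stack_meta_learner_train_percentage=0.2,
    stack_meta_learner_k_wargs={"C": 1.0},  # placeholder initializer kwargs
)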
+ :vartype tags: dict[str, str] + :ivar component_id: ARM resource ID of the component resource. + :vartype component_id: str + :ivar compute_id: ARM resource ID of the compute resource. + :vartype compute_id: str + :ivar display_name: Display name of job. + :vartype display_name: str + :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :vartype experiment_name: str + :ivar identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. Known + values are: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :ivar services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar early_termination: Early termination policies enable canceling poor-performing runs + before they complete. + :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :ivar inputs: Mapping of input data bindings used in the job. + :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :ivar limits: Sweep Job limit. + :vartype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits + :ivar objective: Required. [Required] Optimization objective. + :vartype objective: ~azure.mgmt.machinelearningservices.models.Objective + :ivar outputs: Mapping of output data bindings used in the job. + :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :ivar sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. + :vartype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm + :ivar search_space: Required. [Required] A dictionary containing each parameter and its + distribution. The dictionary key is the name of the parameter. + :vartype search_space: any + :ivar trial: Required. [Required] Trial component definition. 
+ :vartype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent """ _validation = { - 'id': {'readonly': True}, + 'job_type': {'required': True}, + 'status': {'readonly': True}, + 'objective': {'required': True}, + 'sampling_algorithm': {'required': True}, + 'search_space': {'required': True}, + 'trial': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'component_id': {'key': 'componentId', 'type': 'str'}, + 'compute_id': {'key': 'computeId', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'services': {'key': 'services', 'type': '{JobService}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, + 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, + 'limits': {'key': 'limits', 'type': 'SweepJobLimits'}, + 'objective': {'key': 'objective', 'type': 'Objective'}, + 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, + 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'SamplingAlgorithm'}, + 'search_space': {'key': 'searchSpace', 'type': 'object'}, + 'trial': {'key': 'trial', 'type': 'TrialComponent'}, } def __init__( self, + *, + objective: "_models.Objective", + sampling_algorithm: "_models.SamplingAlgorithm", + search_space: Any, + trial: "_models.TrialComponent", + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + component_id: Optional[str] = None, + compute_id: Optional[str] = None, + display_name: Optional[str] = None, + experiment_name: Optional[str] = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: Optional[bool] = False, + services: Optional[Dict[str, "_models.JobService"]] = None, + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.SweepJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, **kwargs ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword component_id: ARM resource ID of the component resource. + :paramtype component_id: str + :keyword compute_id: ARM resource ID of the compute resource. + :paramtype compute_id: str + :keyword display_name: Display name of job. + :paramtype display_name: str + :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is + placed in the "Default" experiment. + :paramtype experiment_name: str + :keyword identity: Identity configuration. If set, this should be one of AmlToken, + ManagedIdentity, UserIdentity or null. + Defaults to AmlToken if null. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :keyword is_archived: Is the asset archived?. 
+ :paramtype is_archived: bool + :keyword services: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of FileStreamObject. + :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] + :keyword early_termination: Early termination policies enable canceling poor-performing runs + before they complete. + :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :keyword inputs: Mapping of input data bindings used in the job. + :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] + :keyword limits: Sweep Job limit. + :paramtype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits + :keyword objective: Required. [Required] Optimization objective. + :paramtype objective: ~azure.mgmt.machinelearningservices.models.Objective + :keyword outputs: Mapping of output data bindings used in the job. + :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] + :keyword sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. + :paramtype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm + :keyword search_space: Required. [Required] A dictionary containing each parameter and its + distribution. The dictionary key is the name of the parameter. + :paramtype search_space: any + :keyword trial: Required. [Required] Trial component definition. + :paramtype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent + """ + super(SweepJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, services=services, **kwargs) + self.job_type = 'Sweep' # type: str + self.early_termination = early_termination + self.inputs = inputs + self.limits = limits + self.objective = objective + self.outputs = outputs + self.sampling_algorithm = sampling_algorithm + self.search_space = search_space + self.trial = trial + + +class SweepJobLimits(JobLimits): + """Sweep Job limit class. -class PrivateEndpointConnection(msrest.serialization.Model): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: ResourceId of the private endpoint connection. - :vartype id: str - :ivar name: Friendly name of the private endpoint connection. - :vartype name: str - :ivar type: Resource type of private endpoint connection. - :vartype type: str - :param private_endpoint: The resource of private end point. - :type private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpoint - :param private_link_service_connection_state: A collection of information about the state of - the connection between service consumer and provider. - :type private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The provisioning state of the private endpoint connection resource. - Possible values include: "Succeeded", "Creating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState + :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Known + values are: "Command", "Sweep". 
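# Illustrative sketch (editor's addition, not part of the generated code): assembling a
# SweepJob as defined above. Objective, RandomSamplingAlgorithm and TrialComponent are
# assumed to be defined elsewhere in this models module with the parameters used here,
# and the search_space payload is only an opaque placeholder whose exact format is
# defined by the service.
from azure.mgmt.machinelearningservices import models as _models

sweep_job = _models.SweepJob(
    display_name="sample-sweep-job",
    objective=_models.Objective(goal="Maximize", primary_metric="accuracy"),  # assumed signature
    sampling_algorithm=_models.RandomSamplingAlgorithm(),                     # assumed subclass
    search_space={"learning_rate": {"type": "uniform", "min": 0.001, "max": 0.1}},
    trial=_models.TrialComponent(                                             # assumed signature
        command="python train.py",
        environment_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                       "Microsoft.MachineLearningServices/workspaces/<ws>/environments/<env>/versions/1",
    ),
)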
+ :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType + :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. + Only supports duration with precision as low as Seconds. + :vartype timeout: ~datetime.timedelta + :ivar max_concurrent_trials: Sweep Job max concurrent trials. + :vartype max_concurrent_trials: int + :ivar max_total_trials: Sweep Job max total trials. + :vartype max_total_trials: int + :ivar trial_timeout: Sweep Job Trial timeout value. + :vartype trial_timeout: ~datetime.timedelta """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + 'job_limits_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, + 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, } def __init__( self, *, - private_endpoint: Optional["PrivateEndpoint"] = None, - private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + timeout: Optional[datetime.timedelta] = None, + max_concurrent_trials: Optional[int] = None, + max_total_trials: Optional[int] = None, + trial_timeout: Optional[datetime.timedelta] = None, **kwargs ): - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.private_endpoint = private_endpoint - self.private_link_service_connection_state = private_link_service_connection_state - self.provisioning_state = None - - -class PrivateLinkResource(Resource): - """A private link resource. + """ + :keyword timeout: The max run duration in ISO 8601 format, after which the job will be + cancelled. Only supports duration with precision as low as Seconds. + :paramtype timeout: ~datetime.timedelta + :keyword max_concurrent_trials: Sweep Job max concurrent trials. + :paramtype max_concurrent_trials: int + :keyword max_total_trials: Sweep Job max total trials. + :paramtype max_total_trials: int + :keyword trial_timeout: Sweep Job Trial timeout value. + :paramtype trial_timeout: ~datetime.timedelta + """ + super(SweepJobLimits, self).__init__(timeout=timeout, **kwargs) + self.job_limits_type = 'Sweep' # type: str + self.max_concurrent_trials = max_concurrent_trials + self.max_total_trials = max_total_trials + self.trial_timeout = trial_timeout + + +class SynapseSpark(Compute): + """A SynapseSpark compute. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. 
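# Illustrative sketch (editor's addition, not part of the generated code): SweepJobLimits
# as defined above; durations are regular datetime.timedelta values serialized as
# ISO 8601. The numbers are placeholders.
import datetime

from azure.mgmt.machinelearningservices import models as _models

sweep_limits = _models.SweepJobLimits(
    max_total_trials=20,
    max_concurrent_trials=4,
    trial_timeout=datetime.timedelta(minutes=30),
    timeout=datetime.timedelta(hours=4),
)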
- :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar group_id: The private link resource group id. - :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :param required_zone_names: The private link resource Private link DNS zone name. - :type required_zone_names: list[str] + All required parameters must be populated in order to send to Azure. + + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: ~datetime.datetime + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'group_id': {'readonly': True}, - 'required_members': {'readonly': True}, + 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, } def __init__( self, *, - identity: Optional["Identity"] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - required_zone_names: Optional[List[str]] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + properties: Optional["_models.SynapseSparkProperties"] = None, **kwargs ): - super(PrivateLinkResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) - self.group_id = None - self.required_members = None - self.required_zone_names = required_zone_names - + """ + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + :keyword properties: + :paramtype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties + """ + super(SynapseSpark, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.compute_type = 'SynapseSpark' # type: str + self.properties = properties -class PrivateLinkResourceListResult(msrest.serialization.Model): - """A list of private link resources. - :param value: Array of private link resources. 
- :type value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] +class SynapseSparkProperties(msrest.serialization.Model): + """SynapseSparkProperties. + + :ivar auto_scale_properties: Auto scale properties. + :vartype auto_scale_properties: ~azure.mgmt.machinelearningservices.models.AutoScaleProperties + :ivar auto_pause_properties: Auto pause properties. + :vartype auto_pause_properties: ~azure.mgmt.machinelearningservices.models.AutoPauseProperties + :ivar spark_version: Spark version. + :vartype spark_version: str + :ivar node_count: The number of compute nodes currently assigned to the compute. + :vartype node_count: int + :ivar node_size: Node size. + :vartype node_size: str + :ivar node_size_family: Node size family. + :vartype node_size_family: str + :ivar subscription_id: Azure subscription identifier. + :vartype subscription_id: str + :ivar resource_group: Name of the resource group in which workspace is located. + :vartype resource_group: str + :ivar workspace_name: Name of Azure Machine Learning workspace. + :vartype workspace_name: str + :ivar pool_name: Pool name. + :vartype pool_name: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'}, + 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'}, + 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'pool_name': {'key': 'poolName', 'type': 'str'}, } def __init__( self, *, - value: Optional[List["PrivateLinkResource"]] = None, + auto_scale_properties: Optional["_models.AutoScaleProperties"] = None, + auto_pause_properties: Optional["_models.AutoPauseProperties"] = None, + spark_version: Optional[str] = None, + node_count: Optional[int] = None, + node_size: Optional[str] = None, + node_size_family: Optional[str] = None, + subscription_id: Optional[str] = None, + resource_group: Optional[str] = None, + workspace_name: Optional[str] = None, + pool_name: Optional[str] = None, **kwargs ): - super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = value - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. - - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus - :param description: The reason for approval/rejection of the connection. - :type description: str - :param actions_required: A message indicating if changes on the service provider require any - updates on the consumer. - :type actions_required: str + """ + :keyword auto_scale_properties: Auto scale properties. + :paramtype auto_scale_properties: + ~azure.mgmt.machinelearningservices.models.AutoScaleProperties + :keyword auto_pause_properties: Auto pause properties. 
+ :paramtype auto_pause_properties: + ~azure.mgmt.machinelearningservices.models.AutoPauseProperties + :keyword spark_version: Spark version. + :paramtype spark_version: str + :keyword node_count: The number of compute nodes currently assigned to the compute. + :paramtype node_count: int + :keyword node_size: Node size. + :paramtype node_size: str + :keyword node_size_family: Node size family. + :paramtype node_size_family: str + :keyword subscription_id: Azure subscription identifier. + :paramtype subscription_id: str + :keyword resource_group: Name of the resource group in which workspace is located. + :paramtype resource_group: str + :keyword workspace_name: Name of Azure Machine Learning workspace. + :paramtype workspace_name: str + :keyword pool_name: Pool name. + :paramtype pool_name: str + """ + super(SynapseSparkProperties, self).__init__(**kwargs) + self.auto_scale_properties = auto_scale_properties + self.auto_pause_properties = auto_pause_properties + self.spark_version = spark_version + self.node_count = node_count + self.node_size = node_size + self.node_size_family = node_size_family + self.subscription_id = subscription_id + self.resource_group = resource_group + self.workspace_name = workspace_name + self.pool_name = pool_name + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :vartype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
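# Illustrative usage sketch (reviewer note, not part of the generated patch): building the
# SynapseSpark compute defined above together with its SynapseSparkProperties. The resource ID,
# pool name, and version strings are placeholders; the import path follows the package layout
# shown in this diff.
from azure.mgmt.machinelearningservices import models as ml_models

synapse_compute = ml_models.SynapseSpark(
    description="Attached Synapse Spark pool",
    resource_id=(
        "/subscriptions/<subscription-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Synapse/workspaces/<synapse-ws>/bigDataPools/<pool>"
    ),
    properties=ml_models.SynapseSparkProperties(
        spark_version="3.2",
        node_count=3,
        node_size="Medium",
        node_size_family="MemoryOptimized",
        pool_name="<pool>",
    ),
)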
+ :vartype last_modified_at: ~datetime.datetime """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, } def __init__( self, *, - status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, - description: Optional[str] = None, - actions_required: Optional[str] = None, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, **kwargs ): - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.status = status - self.description = description - self.actions_required = actions_required + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", "Key". + :paramtype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", "Key". + :paramtype last_modified_by_type: str or + ~azure.mgmt.machinelearningservices.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at -class QuotaBaseProperties(msrest.serialization.Model): - """The properties for Quota update or retrieval. +class SystemService(msrest.serialization.Model): + """A system service running on a compute. + + Variables are only populated by the server, and will be ignored when sending a request. - :param id: Specifies the resource ID. - :type id: str - :param type: Specifies the resource type. - :type type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :param unit: An enum describing the unit of quota measurement. Possible values include: - "Count". - :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + :ivar system_service_type: The type of this system service. + :vartype system_service_type: str + :ivar public_ip_address: Public IP address. + :vartype public_ip_address: str + :ivar version: The version for this type. 
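# Illustrative sketch (reviewer note, not part of the generated patch): SystemData is normally
# populated by the service on responses; constructing it by hand is mainly useful for test
# fixtures. Field values below are placeholders, and the string form of CreatedByType is used
# rather than assuming enum member names.
import datetime

from azure.mgmt.machinelearningservices import models as ml_models

fixture_system_data = ml_models.SystemData(
    created_by="alice@example.com",
    created_by_type="User",
    created_at=datetime.datetime(2022, 6, 1, tzinfo=datetime.timezone.utc),
    last_modified_by="alice@example.com",
    last_modified_by_type="User",
    last_modified_at=datetime.datetime(2022, 6, 2, tzinfo=datetime.timezone.utc),
)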
+ :vartype version: str """ + _validation = { + 'system_service_type': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'version': {'readonly': True}, + } + _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, } def __init__( self, - *, - id: Optional[str] = None, - type: Optional[str] = None, - limit: Optional[int] = None, - unit: Optional[Union[str, "QuotaUnit"]] = None, **kwargs ): - super(QuotaBaseProperties, self).__init__(**kwargs) - self.id = id - self.type = type - self.limit = limit - self.unit = unit - + """ + """ + super(SystemService, self).__init__(**kwargs) + self.system_service_type = None + self.public_ip_address = None + self.version = None -class QuotaUpdateParameters(msrest.serialization.Model): - """Quota update parameters. - :param value: The list for update quota. - :type value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] +class TableVerticalFeaturizationSettings(FeaturizationSettings): + """Featurization Configuration. + + :ivar dataset_language: Dataset language, useful for the text data. + :vartype dataset_language: str + :ivar blocked_transformers: These transformers shall not be used in featurization. + :vartype blocked_transformers: list[str or + ~azure.mgmt.machinelearningservices.models.BlockedTransformers] + :ivar column_name_and_types: Dictionary of column name and its type (int, float, string, + datetime etc). + :vartype column_name_and_types: dict[str, str] + :ivar enable_dnn_featurization: Determines whether to use Dnn based featurizers for data + featurization. + :vartype enable_dnn_featurization: bool + :ivar mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take + care of necessary transformation of the data in featurization phase. + If 'Off' is selected then no featurization is done. + If 'Custom' is selected then user can specify additional inputs to customize how featurization + is done. Known values are: "Auto", "Custom", "Off". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode + :ivar transformer_params: User can specify additional transformers to be used along with the + columns to which it would be applied and parameters for the transformer constructor. 
+ :vartype transformer_params: dict[str, + list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] """ _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + 'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'}, + 'column_name_and_types': {'key': 'columnNameAndTypes', 'type': '{str}'}, + 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'}, } def __init__( self, *, - value: Optional[List["QuotaBaseProperties"]] = None, + dataset_language: Optional[str] = None, + blocked_transformers: Optional[List[Union[str, "_models.BlockedTransformers"]]] = None, + column_name_and_types: Optional[Dict[str, str]] = None, + enable_dnn_featurization: Optional[bool] = False, + mode: Optional[Union[str, "_models.FeaturizationMode"]] = None, + transformer_params: Optional[Dict[str, List["_models.ColumnTransformer"]]] = None, **kwargs ): - super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = value + """ + :keyword dataset_language: Dataset language, useful for the text data. + :paramtype dataset_language: str + :keyword blocked_transformers: These transformers shall not be used in featurization. + :paramtype blocked_transformers: list[str or + ~azure.mgmt.machinelearningservices.models.BlockedTransformers] + :keyword column_name_and_types: Dictionary of column name and its type (int, float, string, + datetime etc). + :paramtype column_name_and_types: dict[str, str] + :keyword enable_dnn_featurization: Determines whether to use Dnn based featurizers for data + featurization. + :paramtype enable_dnn_featurization: bool + :keyword mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take + care of necessary transformation of the data in featurization phase. + If 'Off' is selected then no featurization is done. + If 'Custom' is selected then user can specify additional inputs to customize how featurization + is done. Known values are: "Auto", "Custom", "Off". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode + :keyword transformer_params: User can specify additional transformers to be used along with the + columns to which it would be applied and parameters for the transformer constructor. + :paramtype transformer_params: dict[str, + list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] + """ + super(TableVerticalFeaturizationSettings, self).__init__(dataset_language=dataset_language, **kwargs) + self.blocked_transformers = blocked_transformers + self.column_name_and_types = column_name_and_types + self.enable_dnn_featurization = enable_dnn_featurization + self.mode = mode + self.transformer_params = transformer_params + + +class TableVerticalLimitSettings(msrest.serialization.Model): + """Job execution constraints. + + :ivar enable_early_termination: Enable early termination, determines whether or not if + AutoMLJob will terminate early if there is no score improvement in last 20 iterations. + :vartype enable_early_termination: bool + :ivar exit_score: Exit score for the AutoML job. + :vartype exit_score: float + :ivar max_concurrent_trials: Maximum Concurrent iterations. + :vartype max_concurrent_trials: int + :ivar max_cores_per_trial: Max cores per iteration. + :vartype max_cores_per_trial: int + :ivar max_trials: Number of iterations. 
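# Illustrative sketch (reviewer note, not part of the generated patch): customizing AutoML
# featurization with the settings class above. Column names and the blocked transformer value
# are placeholders; plain strings are accepted wherever the docstring allows "str or <enum>".
from azure.mgmt.machinelearningservices import models as ml_models

featurization = ml_models.TableVerticalFeaturizationSettings(
    dataset_language="eng",
    mode="Custom",  # FeaturizationMode: "Auto" | "Custom" | "Off"
    enable_dnn_featurization=False,
    column_name_and_types={"age": "int", "comment": "string"},
    blocked_transformers=["TfIdf"],
)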
+ :vartype max_trials: int + :ivar timeout: AutoML job timeout. + :vartype timeout: ~datetime.timedelta + :ivar trial_timeout: Iteration timeout. + :vartype trial_timeout: ~datetime.timedelta + """ + _attribute_map = { + 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, + 'exit_score': {'key': 'exitScore', 'type': 'float'}, + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, + 'max_trials': {'key': 'maxTrials', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + } -class RegistryListCredentialsResult(msrest.serialization.Model): - """RegistryListCredentialsResult. + def __init__( + self, + *, + enable_early_termination: Optional[bool] = True, + exit_score: Optional[float] = None, + max_concurrent_trials: Optional[int] = 1, + max_cores_per_trial: Optional[int] = -1, + max_trials: Optional[int] = 1000, + timeout: Optional[datetime.timedelta] = "PT6H", + trial_timeout: Optional[datetime.timedelta] = "PT30M", + **kwargs + ): + """ + :keyword enable_early_termination: Enable early termination, determines whether or not if + AutoMLJob will terminate early if there is no score improvement in last 20 iterations. + :paramtype enable_early_termination: bool + :keyword exit_score: Exit score for the AutoML job. + :paramtype exit_score: float + :keyword max_concurrent_trials: Maximum Concurrent iterations. + :paramtype max_concurrent_trials: int + :keyword max_cores_per_trial: Max cores per iteration. + :paramtype max_cores_per_trial: int + :keyword max_trials: Number of iterations. + :paramtype max_trials: int + :keyword timeout: AutoML job timeout. + :paramtype timeout: ~datetime.timedelta + :keyword trial_timeout: Iteration timeout. + :paramtype trial_timeout: ~datetime.timedelta + """ + super(TableVerticalLimitSettings, self).__init__(**kwargs) + self.enable_early_termination = enable_early_termination + self.exit_score = exit_score + self.max_concurrent_trials = max_concurrent_trials + self.max_cores_per_trial = max_cores_per_trial + self.max_trials = max_trials + self.timeout = timeout + self.trial_timeout = trial_timeout + + +class TargetUtilizationScaleSettings(OnlineScaleSettings): + """TargetUtilizationScaleSettings. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar location: - :vartype location: str - :ivar username: - :vartype username: str - :param passwords: - :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] + :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by + server. Known values are: "Default", "TargetUtilization". + :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType + :ivar max_instances: The maximum number of instances that the deployment can scale to. The + quota will be reserved for max_instances. + :vartype max_instances: int + :ivar min_instances: The minimum number of instances to always be present. + :vartype min_instances: int + :ivar polling_interval: The polling interval in ISO 8691 format. Only supports duration with + precision as low as Seconds. + :vartype polling_interval: ~datetime.timedelta + :ivar target_utilization_percentage: Target CPU usage for the autoscaler. 
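# Illustrative sketch (reviewer note, not part of the generated patch): the duration limits in
# TableVerticalLimitSettings are typed as datetime.timedelta and serialize to ISO 8601
# durations (the "PT6H"/"PT30M" defaults above), so timedelta values can be passed directly.
import datetime

from azure.mgmt.machinelearningservices import models as ml_models

limits = ml_models.TableVerticalLimitSettings(
    max_trials=50,
    max_concurrent_trials=4,
    max_cores_per_trial=-1,
    timeout=datetime.timedelta(hours=2),
    trial_timeout=datetime.timedelta(minutes=20),
    enable_early_termination=True,
)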
+ :vartype target_utilization_percentage: int """ _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, + 'scale_type': {'required': True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + 'max_instances': {'key': 'maxInstances', 'type': 'int'}, + 'min_instances': {'key': 'minInstances', 'type': 'int'}, + 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'}, + 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'}, } def __init__( self, *, - passwords: Optional[List["Password"]] = None, + max_instances: Optional[int] = 1, + min_instances: Optional[int] = 1, + polling_interval: Optional[datetime.timedelta] = "PT1S", + target_utilization_percentage: Optional[int] = 70, **kwargs ): - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.username = None - self.passwords = passwords - - -class ResourceId(msrest.serialization.Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. + """ + :keyword max_instances: The maximum number of instances that the deployment can scale to. The + quota will be reserved for max_instances. + :paramtype max_instances: int + :keyword min_instances: The minimum number of instances to always be present. + :paramtype min_instances: int + :keyword polling_interval: The polling interval in ISO 8691 format. Only supports duration with + precision as low as Seconds. + :paramtype polling_interval: ~datetime.timedelta + :keyword target_utilization_percentage: Target CPU usage for the autoscaler. + :paramtype target_utilization_percentage: int + """ + super(TargetUtilizationScaleSettings, self).__init__(**kwargs) + self.scale_type = 'TargetUtilization' # type: str + self.max_instances = max_instances + self.min_instances = min_instances + self.polling_interval = polling_interval + self.target_utilization_percentage = target_utilization_percentage + + +class TensorFlow(DistributionConfiguration): + """TensorFlow distribution configuration. All required parameters must be populated in order to send to Azure. - :param id: Required. The ID of the resource. - :type id: str + :ivar distribution_type: Required. [Required] Specifies the type of distribution + framework.Constant filled by server. Known values are: "PyTorch", "TensorFlow", "Mpi". + :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType + :ivar parameter_server_count: Number of parameter server tasks. + :vartype parameter_server_count: int + :ivar worker_count: Number of workers. If not specified, will default to the instance count. + :vartype worker_count: int """ _validation = { - 'id': {'required': True}, + 'distribution_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'}, + 'worker_count': {'key': 'workerCount', 'type': 'int'}, } def __init__( self, *, - id: str, + parameter_server_count: Optional[int] = 0, + worker_count: Optional[int] = None, **kwargs ): - super(ResourceId, self).__init__(**kwargs) - self.id = id + """ + :keyword parameter_server_count: Number of parameter server tasks. 
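# Illustrative sketch (reviewer note, not part of the generated patch): autoscale settings for
# an online deployment. scale_type is fixed to "TargetUtilization" by the constructor, so only
# the tuning knobs are supplied; the polling interval is a timedelta serialized as an ISO 8601
# duration.
import datetime

from azure.mgmt.machinelearningservices import models as ml_models

scale = ml_models.TargetUtilizationScaleSettings(
    min_instances=1,
    max_instances=5,
    target_utilization_percentage=70,
    polling_interval=datetime.timedelta(seconds=10),
)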
+ :paramtype parameter_server_count: int + :keyword worker_count: Number of workers. If not specified, will default to the instance count. + :paramtype worker_count: int + """ + super(TensorFlow, self).__init__(**kwargs) + self.distribution_type = 'TensorFlow' # type: str + self.parameter_server_count = parameter_server_count + self.worker_count = worker_count + + +class TextClassification(AutoMLVertical, NlpVertical): + """Text Classification task in AutoML NLP vertical. +NLP - Natural Language Processing. + All required parameters must be populated in order to send to Azure. -class ResourceName(msrest.serialization.Model): - """The Resource Name. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". 
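# Illustrative sketch (reviewer note, not part of the generated patch): a TensorFlow
# distribution section; distribution_type is pinned to "TensorFlow" by the constructor. The
# resulting object plugs into the TrialComponent.distribution field defined later in this file.
from azure.mgmt.machinelearningservices import models as ml_models

distribution = ml_models.TensorFlow(parameter_server_count=1, worker_count=4)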
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, } def __init__( self, + *, + training_data: "_models.MLTableJobInput", + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, **kwargs ): - super(ResourceName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class ResourceQuota(msrest.serialization.Model): - """The quota assigned to a resource. + """ + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword primary_metric: Primary metric for Text-Classification task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". 
+ :paramtype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics + """ + super(TextClassification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, limit_settings=limit_settings, validation_data=validation_data, **kwargs) + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.validation_data = validation_data + self.task_type = 'TextClassification' # type: str + self.primary_metric = primary_metric + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class TextClassificationMultilabel(AutoMLVertical, NlpVertical): + """Text Classification Multilabel task in AutoML NLP vertical. +NLP - Natural Language Processing. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar name: Name of the resource. - :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName - :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + All required parameters must be populated in order to send to Azure. + + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. + Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. + Known values are: "AUCWeighted", "Accuracy", "NormMacroRecall", + "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "IOU". 
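# Illustrative sketch (reviewer note, not part of the generated patch): a minimal
# TextClassification AutoML vertical. MLTableJobInput is defined elsewhere in this module; its
# required `uri` keyword mirrors the *JobInput models later in this file. URIs are placeholders.
from azure.mgmt.machinelearningservices import models as ml_models

text_task = ml_models.TextClassification(
    training_data=ml_models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/train-mltable/"
    ),
    validation_data=ml_models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/valid-mltable/"
    ),
    target_column_name="label",
    primary_metric="Accuracy",
)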
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + 'primary_metric': {'readonly': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, } def __init__( self, + *, + training_data: "_models.MLTableJobInput", + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, **kwargs ): - super(ResourceQuota, self).__init__(**kwargs) - self.id = None - self.type = None - self.name = None - self.limit = None - self.unit = None - - -class ResourceSkuLocationInfo(msrest.serialization.Model): - """ResourceSkuLocationInfo. + """ + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. 
+ :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super(TextClassificationMultilabel, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, limit_settings=limit_settings, validation_data=validation_data, **kwargs) + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.validation_data = validation_data + self.task_type = 'TextClassificationMultilabel' # type: str + self.primary_metric = None + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class TextNer(AutoMLVertical, NlpVertical): + """Text-NER task in AutoML NLP vertical. +NER - Named Entity Recognition. +NLP - Natural Language Processing. Variables are only populated by the server, and will be ignored when sending a request. - :ivar location: Location of the SKU. - :vartype location: str - :ivar zones: List of availability zones where the SKU is supported. - :vartype zones: list[str] - :ivar zone_details: Details of capabilities available to a SKU in specific zones. - :vartype zone_details: list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] + All required parameters must be populated in order to send to Azure. + + :ivar featurization_settings: Featurization inputs needed for AutoML job. + :vartype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :ivar limit_settings: Execution constraints for AutoMLJob. + :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :ivar validation_data: Validation data inputs. + :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. Known + values are: "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: Required. [Required] Training data input. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar primary_metric: Primary metric for Text-NER task. + Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Known values + are: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + "PrecisionScoreWeighted". 
+ :vartype primary_metric: str or + ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'location': {'readonly': True}, - 'zones': {'readonly': True}, - 'zone_details': {'readonly': True}, + 'task_type': {'required': True}, + 'training_data': {'required': True}, + 'primary_metric': {'readonly': True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'zones': {'key': 'zones', 'type': '[str]'}, - 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, + 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, + 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, + 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, + 'task_type': {'key': 'taskType', 'type': 'str'}, + 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, } def __init__( self, + *, + training_data: "_models.MLTableJobInput", + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, **kwargs ): - super(ResourceSkuLocationInfo, self).__init__(**kwargs) - self.location = None - self.zones = None - self.zone_details = None - - -class ResourceSkuZoneDetails(msrest.serialization.Model): - """Describes The zonal capabilities of a SKU. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The set of zones that the SKU is available in with the specified capabilities. - :vartype name: list[str] - :ivar capabilities: A list of capabilities that are available for the SKU in the specified list - of zones. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] + """ + :keyword featurization_settings: Featurization inputs needed for AutoML job. + :paramtype featurization_settings: + ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings + :keyword limit_settings: Execution constraints for AutoMLJob. + :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings + :keyword validation_data: Validation data inputs. + :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: Required. [Required] Training data input. 
+ :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + """ + super(TextNer, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, limit_settings=limit_settings, validation_data=validation_data, **kwargs) + self.featurization_settings = featurization_settings + self.limit_settings = limit_settings + self.validation_data = validation_data + self.task_type = 'TextNER' # type: str + self.primary_metric = None + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.training_data = training_data + + +class TmpfsOptions(msrest.serialization.Model): + """TmpfsOptions. + + :ivar size: Mention the Tmpfs size. + :vartype size: int """ - _validation = { - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, - } - _attribute_map = { - 'name': {'key': 'name', 'type': '[str]'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + 'size': {'key': 'size', 'type': 'int'}, } def __init__( self, + *, + size: Optional[int] = None, **kwargs ): - super(ResourceSkuZoneDetails, self).__init__(**kwargs) - self.name = None - self.capabilities = None + """ + :keyword size: Mention the Tmpfs size. + :paramtype size: int + """ + super(TmpfsOptions, self).__init__(**kwargs) + self.size = size -class Restriction(msrest.serialization.Model): - """The restriction because of which SKU cannot be used. +class TrialComponent(msrest.serialization.Model): + """Trial component definition. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar type: The type of restrictions. As of now only possible value for this is location. - :vartype type: str - :ivar values: The value of restrictions. If the restriction type is set to location. This would - be different locations where the SKU is restricted. - :vartype values: list[str] - :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", - "NotAvailableForRegion", "NotAvailableForSubscription". - :type reason_code: str or ~azure.mgmt.machinelearningservices.models.ReasonCode + :ivar code_id: ARM resource ID of the code asset. + :vartype code_id: str + :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python + train.py". + :vartype command: str + :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, + Tensorflow, PyTorch, or null. + :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification + for the job. + :vartype environment_id: str + :ivar environment_variables: Environment variables included in the job. + :vartype environment_variables: dict[str, str] + :ivar resources: Compute Resource configuration for the job. 
+ :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ _validation = { - 'type': {'readonly': True}, - 'values': {'readonly': True}, + 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + 'environment_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, - 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, } def __init__( self, *, - reason_code: Optional[Union[str, "ReasonCode"]] = None, + command: str, + environment_id: str, + code_id: Optional[str] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, + environment_variables: Optional[Dict[str, str]] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, **kwargs ): - super(Restriction, self).__init__(**kwargs) - self.type = None - self.values = None - self.reason_code = reason_code - - -class ScaleSettings(msrest.serialization.Model): - """scale settings for AML Compute. + """ + :keyword code_id: ARM resource ID of the code asset. + :paramtype code_id: str + :keyword command: Required. [Required] The command to execute on startup of the job. eg. + "python train.py". + :paramtype command: str + :keyword distribution: Distribution configuration of the job. If set, this should be one of + Mpi, Tensorflow, PyTorch, or null. + :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :keyword environment_id: Required. [Required] The ARM resource ID of the Environment + specification for the job. + :paramtype environment_id: str + :keyword environment_variables: Environment variables included in the job. + :paramtype environment_variables: dict[str, str] + :keyword resources: Compute Resource configuration for the job. + :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration + """ + super(TrialComponent, self).__init__(**kwargs) + self.code_id = code_id + self.command = command + self.distribution = distribution + self.environment_id = environment_id + self.environment_variables = environment_variables + self.resources = resources + + +class TritonModelJobInput(JobInput, AssetJobInput): + """TritonModelJobInput. All required parameters must be populated in order to send to Azure. - :param max_node_count: Required. Max number of nodes to use. - :type max_node_count: int - :param min_node_count: Min number of nodes to use. - :type min_node_count: int - :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. - :type node_idle_time_before_scale_down: ~datetime.timedelta + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. 
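# Illustrative sketch (reviewer note, not part of the generated patch): assembling a
# TrialComponent for a distributed job. The command, code and environment IDs are placeholders;
# JobResourceConfiguration is defined elsewhere in this module and its instance_count keyword
# is assumed from that definition.
from azure.mgmt.machinelearningservices import models as ml_models

trial = ml_models.TrialComponent(
    command="python train.py --epochs 10",
    environment_id="<environment-arm-id-or-reference>",
    code_id="<code-asset-arm-id>",
    distribution=ml_models.TensorFlow(worker_count=4),
    environment_variables={"LOG_LEVEL": "info"},
    resources=ml_models.JobResourceConfiguration(instance_count=4),
)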
+ Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType """ _validation = { - 'max_node_count': {'required': True}, + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, } _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, } def __init__( self, *, - max_node_count: int, - min_node_count: Optional[int] = 0, - node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, **kwargs ): - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = max_node_count - self.min_node_count = min_node_count - self.node_idle_time_before_scale_down = node_idle_time_before_scale_down + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(TritonModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'triton_model' # type: str + self.description = description -class ServicePrincipalCredentials(msrest.serialization.Model): - """Service principal credentials. +class TritonModelJobOutput(JobOutput, AssetJobOutput): + """TritonModelJobOutput. All required parameters must be populated in order to send to Azure. - :param client_id: Required. Client Id. - :type client_id: str - :param client_secret: Required. Client secret. - :type client_secret: str + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". 
+ :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'client_id': {'required': True}, - 'client_secret': {'required': True}, + 'job_output_type': {'required': True}, } _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, } def __init__( self, *, - client_id: str, - client_secret: str, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(ServicePrincipalCredentials, self).__init__(**kwargs) - self.client_id = client_id - self.client_secret = client_secret + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(TritonModelJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'triton_model' # type: str + self.description = description -class SharedPrivateLinkResource(msrest.serialization.Model): - """SharedPrivateLinkResource. +class TruncationSelectionPolicy(EarlyTerminationPolicy): + """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval. - :param name: Unique name of the private link. - :type name: str - :param private_link_resource_id: The resource id that private link links to. - :type private_link_resource_id: str - :param group_id: The private link resource group id. - :type group_id: str - :param request_message: Request message. - :type request_message: str - :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner - of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", - "Timeout". - :type status: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + All required parameters must be populated in order to send to Azure. + + :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. + :vartype delay_evaluation: int + :ivar evaluation_interval: Interval (number of runs) between policy evaluations. + :vartype evaluation_interval: int + :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. + Known values are: "Bandit", "MedianStopping", "TruncationSelection". + :vartype policy_type: str or + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType + :ivar truncation_percentage: The percentage of runs to cancel at each evaluation interval. 
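# Illustrative sketch (reviewer note, not part of the generated patch): pairing a Triton model
# input with an output binding for a job. job_input_type/job_output_type are fixed to
# "triton_model" by the constructors; URIs are placeholders and the mode values are taken from
# the known values listed above.
from azure.mgmt.machinelearningservices import models as ml_models

triton_in = ml_models.TritonModelJobInput(
    uri="azureml://datastores/workspaceblobstore/paths/models/triton/",
    mode="Download",
    description="Triton model repository to score with",
)
triton_out = ml_models.TritonModelJobOutput(
    uri="azureml://datastores/workspaceblobstore/paths/outputs/triton/",
    mode="Upload",
    description="Re-packaged Triton model repository",
)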
+ :vartype truncation_percentage: int """ + _validation = { + 'policy_type': {'required': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__( self, *, - name: Optional[str] = None, - private_link_resource_id: Optional[str] = None, - group_id: Optional[str] = None, - request_message: Optional[str] = None, - status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, + delay_evaluation: Optional[int] = 0, + evaluation_interval: Optional[int] = 0, + truncation_percentage: Optional[int] = 0, **kwargs ): - super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = name - self.private_link_resource_id = private_link_resource_id - self.group_id = group_id - self.request_message = request_message - self.status = status + """ + :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. + :paramtype delay_evaluation: int + :keyword evaluation_interval: Interval (number of runs) between policy evaluations. + :paramtype evaluation_interval: int + :keyword truncation_percentage: The percentage of runs to cancel at each evaluation interval. + :paramtype truncation_percentage: int + """ + super(TruncationSelectionPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = 'TruncationSelection' # type: str + self.truncation_percentage = truncation_percentage -class Sku(msrest.serialization.Model): - """Sku of the resource. +class UpdateWorkspaceQuotas(msrest.serialization.Model): + """The properties for update Quota response. + + Variables are only populated by the server, and will be ignored when sending a request. - :param name: Name of the sku. - :type name: str - :param tier: Tier of the sku like Basic or Enterprise. - :type tier: str + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. Known values are: "Count". + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + :ivar status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", "OperationNotEnabledForRegion". 
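# Illustrative sketch (reviewer note, not part of the generated patch): an early-termination
# policy that cancels the worst 20% of runs at every evaluation once five evaluation intervals
# have elapsed; policy_type is fixed to "TruncationSelection" by the constructor.
from azure.mgmt.machinelearningservices import models as ml_models

early_termination = ml_models.TruncationSelectionPolicy(
    truncation_percentage=20,
    evaluation_interval=1,
    delay_evaluation=5,
)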
+ :vartype status: str or ~azure.mgmt.machinelearningservices.models.Status """ + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, *, - name: Optional[str] = None, - tier: Optional[str] = None, + limit: Optional[int] = None, + status: Optional[Union[str, "_models.Status"]] = None, **kwargs ): - super(Sku, self).__init__(**kwargs) - self.name = name - self.tier = tier + """ + :keyword limit: The maximum permitted quota of the resource. + :paramtype limit: long + :keyword status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", "OperationNotEnabledForRegion". + :paramtype status: str or ~azure.mgmt.machinelearningservices.models.Status + """ + super(UpdateWorkspaceQuotas, self).__init__(**kwargs) + self.id = None + self.type = None + self.limit = limit + self.unit = None + self.status = status -class SKUCapability(msrest.serialization.Model): - """Features/user capabilities associated with the sku. +class UpdateWorkspaceQuotasResult(msrest.serialization.Model): + """The result of update workspace quota. + + Variables are only populated by the server, and will be ignored when sending a request. - :param name: Capability/Feature ID. - :type name: str - :param value: Details about the feature/capability. - :type value: str + :ivar value: The list of workspace quota update result. + :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update result. Call + ListNext() with this to fetch the next page of Workspace Quota update result. + :vartype next_link: str """ + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, - *, - name: Optional[str] = None, - value: Optional[str] = None, **kwargs ): - super(SKUCapability, self).__init__(**kwargs) - self.name = name - self.value = value + """ + """ + super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + self.value = None + self.next_link = None -class SkuListResult(msrest.serialization.Model): - """List of skus with features. +class UriFileDataVersion(DataVersionBaseProperties): + """uri-file data version entity. - :param value: - :type value: list[~azure.mgmt.machinelearningservices.models.WorkspaceSku] - :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this - URI to fetch the next page of Workspace Skus. - :type next_link: str + All required parameters must be populated in order to send to Azure. + + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
+ :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. + Known values are: "uri_file", "uri_folder", "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :vartype data_uri: str """ + _validation = { + 'data_type': {'required': True}, + 'data_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'data_uri': {'key': 'dataUri', 'type': 'str'}, } def __init__( self, *, - value: Optional[List["WorkspaceSku"]] = None, - next_link: Optional[str] = None, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, **kwargs ): - super(SkuListResult, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :paramtype data_uri: str + """ + super(UriFileDataVersion, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, **kwargs) + self.data_type = 'uri_file' # type: str + + +class UriFileJobInput(JobInput, AssetJobInput): + """UriFileJobInput. -class SkuSettings(msrest.serialization.Model): - """Describes Workspace Sku details and features. - - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar locations: The set of locations that the SKU is available. This will be supported and - registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). - :vartype locations: list[str] - :ivar location_info: A list of locations and availability zones in those locations where the - SKU is available. - :vartype location_info: - list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] - :ivar tier: Sku Tier like Basic or Enterprise. 
- :vartype tier: str - :ivar resource_type: - :vartype resource_type: str - :ivar name: - :vartype name: str - :ivar capabilities: List of features/user capabilities associated with the sku. - :vartype capabilities: list[~azure.mgmt.machinelearningservices.models.SKUCapability] - :param restrictions: The restrictions because of which SKU cannot be used. This is empty if - there are no restrictions. - :type restrictions: list[~azure.mgmt.machinelearningservices.models.Restriction] + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType """ _validation = { - 'locations': {'readonly': True}, - 'location_info': {'readonly': True}, - 'tier': {'readonly': True}, - 'resource_type': {'readonly': True}, - 'name': {'readonly': True}, - 'capabilities': {'readonly': True}, + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, } _attribute_map = { - 'locations': {'key': 'locations', 'type': '[str]'}, - 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, - 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, } def __init__( self, *, - restrictions: Optional[List["Restriction"]] = None, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, **kwargs ): - super(SkuSettings, self).__init__(**kwargs) - self.locations = None - self.location_info = None - self.tier = None - self.resource_type = None - self.name = None - self.capabilities = None - self.restrictions = restrictions + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(UriFileJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'uri_file' # type: str + self.description = description -class SslConfiguration(msrest.serialization.Model): - """The ssl configuration for scoring. +class UriFileJobOutput(JobOutput, AssetJobOutput): + """UriFileJobOutput. - :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled". 
- :type status: str or ~azure.mgmt.machinelearningservices.models.SslConfigurationStatus - :param cert: Cert data. - :type cert: str - :param key: Key data. - :type key: str - :param cname: CNAME of the cert. - :type cname: str + All required parameters must be populated in order to send to Azure. + + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ + _validation = { + 'job_output_type': {'required': True}, + } + _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, } def __init__( self, *, - status: Optional[Union[str, "SslConfigurationStatus"]] = None, - cert: Optional[str] = None, - key: Optional[str] = None, - cname: Optional[str] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(SslConfiguration, self).__init__(**kwargs) - self.status = status - self.cert = cert - self.key = key - self.cname = cname + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(UriFileJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'uri_file' # type: str + self.description = description -class SystemService(msrest.serialization.Model): - """A system service running on a compute. +class UriFolderDataVersion(DataVersionBaseProperties): + """uri-folder data version entity. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address. - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str + :ivar description: The asset description text. + :vartype description: str + :ivar properties: The asset property dictionary. + :vartype properties: dict[str, str] + :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :vartype tags: dict[str, str] + :ivar is_anonymous: If the name version are system generated (anonymous registration). + :vartype is_anonymous: bool + :ivar is_archived: Is the asset archived?. + :vartype is_archived: bool + :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. 
+ Known values are: "uri_file", "uri_folder", "mltable". + :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType + :ivar data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :vartype data_uri: str """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + 'data_type': {'required': True}, + 'data_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'data_uri': {'key': 'dataUri', 'type': 'str'}, } def __init__( self, + *, + data_uri: str, + description: Optional[str] = None, + properties: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + is_anonymous: Optional[bool] = False, + is_archived: Optional[bool] = False, **kwargs ): - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None - - -class UpdateWorkspaceQuotas(msrest.serialization.Model): - """The properties for update Quota response. + """ + :keyword description: The asset description text. + :paramtype description: str + :keyword properties: The asset property dictionary. + :paramtype properties: dict[str, str] + :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :paramtype tags: dict[str, str] + :keyword is_anonymous: If the name version are system generated (anonymous registration). + :paramtype is_anonymous: bool + :keyword is_archived: Is the asset archived?. + :paramtype is_archived: bool + :keyword data_uri: Required. [Required] Uri of the data. Usage/meaning depends on + Microsoft.MachineLearning.ManagementFrontEnd.Contracts.V20220601Preview.Assets.DataVersionBase.DataType. + :paramtype data_uri: str + """ + super(UriFolderDataVersion, self).__init__(description=description, properties=properties, tags=tags, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, **kwargs) + self.data_type = 'uri_folder' # type: str + + +class UriFolderJobInput(JobInput, AssetJobInput): + """UriFolderJobInput. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :param limit: The maximum permitted quota of the resource. - :type limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :param status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". 
- :type status: str or ~azure.mgmt.machinelearningservices.models.Status + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: Required. [Required] Input Asset URI. + :vartype uri: str + :ivar description: Description for the input. + :vartype description: str + :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. + Known values are: "literal", "uri_file", "uri_folder", "mltable", "custom_model", + "mlflow_model", "triton_model". + :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, + 'uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'job_input_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, } def __init__( self, *, - limit: Optional[int] = None, - status: Optional[Union[str, "Status"]] = None, + uri: str, + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + description: Optional[str] = None, **kwargs ): - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) - self.id = None - self.type = None - self.limit = limit - self.unit = None - self.status = status + """ + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: Required. [Required] Input Asset URI. + :paramtype uri: str + :keyword description: Description for the input. + :paramtype description: str + """ + super(UriFolderJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_input_type = 'uri_folder' # type: str + self.description = description -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): - """The result of update workspace quota. +class UriFolderJobOutput(JobOutput, AssetJobOutput): + """UriFolderJobOutput. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar value: The list of workspace quota update result. - :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] - :ivar next_link: The URI to fetch the next page of workspace quota update result. Call - ListNext() with this to fetch the next page of Workspace Quota update result. - :vartype next_link: str + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", "Direct". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :ivar uri: Output Asset URI. + :vartype uri: str + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by + server. 
Known values are: "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + 'job_output_type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'uri': {'key': 'uri', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, } def __init__( self, + *, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, + uri: Optional[str] = None, + description: Optional[str] = None, **kwargs ): - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) - self.value = None - self.next_link = None + """ + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", + "Direct". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode + :keyword uri: Output Asset URI. + :paramtype uri: str + :keyword description: Description for the output. + :paramtype description: str + """ + super(UriFolderJobOutput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + self.mode = mode + self.uri = uri + self.job_output_type = 'uri_folder' # type: str + self.description = description class Usage(msrest.serialization.Model): @@ -3207,9 +22213,11 @@ class Usage(msrest.serialization.Model): :ivar id: Specifies the resource ID. :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str :ivar type: Specifies the resource type. :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". + :ivar unit: An enum describing the unit of usage measurement. Known values are: "Count". :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit :ivar current_value: The current usage of the resource. :vartype current_value: long @@ -3221,6 +22229,7 @@ class Usage(msrest.serialization.Model): _validation = { 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, 'type': {'readonly': True}, 'unit': {'readonly': True}, 'current_value': {'readonly': True}, @@ -3230,6 +22239,7 @@ class Usage(msrest.serialization.Model): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, @@ -3241,8 +22251,11 @@ def __init__( self, **kwargs ): + """ + """ super(Usage, self).__init__(**kwargs) self.id = None + self.aml_workspace_location = None self.type = None self.unit = None self.current_value = None @@ -3255,107 +22268,272 @@ class UsageName(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. 
+ :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(UsageName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class UserAccountCredentials(msrest.serialization.Model): + """Settings for user account that gets created on each on the nodes of a compute. + + All required parameters must be populated in order to send to Azure. + + :ivar admin_user_name: Required. Name of the administrator user account which can be used to + SSH to nodes. + :vartype admin_user_name: str + :ivar admin_user_ssh_public_key: SSH public key of the administrator user account. + :vartype admin_user_ssh_public_key: str + :ivar admin_user_password: Password of the administrator user account. + :vartype admin_user_password: str + """ + + _validation = { + 'admin_user_name': {'required': True}, + } + + _attribute_map = { + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, + 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, + } + + def __init__( + self, + *, + admin_user_name: str, + admin_user_ssh_public_key: Optional[str] = None, + admin_user_password: Optional[str] = None, + **kwargs + ): + """ + :keyword admin_user_name: Required. Name of the administrator user account which can be used to + SSH to nodes. + :paramtype admin_user_name: str + :keyword admin_user_ssh_public_key: SSH public key of the administrator user account. + :paramtype admin_user_ssh_public_key: str + :keyword admin_user_password: Password of the administrator user account. + :paramtype admin_user_password: str + """ + super(UserAccountCredentials, self).__init__(**kwargs) + self.admin_user_name = admin_user_name + self.admin_user_ssh_public_key = admin_user_ssh_public_key + self.admin_user_password = admin_user_password + + +class UserAssignedIdentity(msrest.serialization.Model): + """User assigned identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal ID of the assigned identity. + :vartype principal_id: str + :ivar client_id: The client ID of the assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + """ + """ + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserIdentity(IdentityConfiguration): + """User identity configuration. + + All required parameters must be populated in order to send to Azure. + + :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant + filled by server. Known values are: "Managed", "AMLToken", "UserIdentity". 
+ :vartype identity_type: str or + ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + 'identity_type': {'required': True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + 'identity_type': {'key': 'identityType', 'type': 'str'}, } def __init__( self, **kwargs ): - super(UsageName, self).__init__(**kwargs) - self.value = None - self.localized_value = None + """ + """ + super(UserIdentity, self).__init__(**kwargs) + self.identity_type = 'UserIdentity' # type: str -class UserAccountCredentials(msrest.serialization.Model): - """Settings for user account that gets created on each on the nodes of a compute. +class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): + """UsernamePasswordAuthTypeWorkspaceConnectionProperties. All required parameters must be populated in order to send to Azure. - :param admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :type admin_user_name: str - :param admin_user_ssh_public_key: SSH public key of the administrator user account. - :type admin_user_ssh_public_key: str - :param admin_user_password: Password of the administrator user account. - :type admin_user_password: str + :ivar auth_type: Required. Authentication type of the connection target.Constant filled by + server. Known values are: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS". + :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType + :ivar category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar target: + :vartype target: str + :ivar value: Value details of the workspace connection. + :vartype value: str + :ivar value_format: format for the workspace connection value. Known values are: "JSON". 
+ :vartype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :ivar credentials: + :vartype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword """ _validation = { - 'admin_user_name': {'required': True}, + 'auth_type': {'required': True}, } _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, + 'auth_type': {'key': 'authType', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + 'value_format': {'key': 'valueFormat', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionUsernamePassword'}, } def __init__( self, *, - admin_user_name: str, - admin_user_ssh_public_key: Optional[str] = None, - admin_user_password: Optional[str] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, + target: Optional[str] = None, + value: Optional[str] = None, + value_format: Optional[Union[str, "_models.ValueFormat"]] = None, + credentials: Optional["_models.WorkspaceConnectionUsernamePassword"] = None, **kwargs ): - super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = admin_user_name - self.admin_user_ssh_public_key = admin_user_ssh_public_key - self.admin_user_password = admin_user_password + """ + :keyword category: Category of the connection. Known values are: "PythonFeed", + "ContainerRegistry", "Git". + :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :keyword target: + :paramtype target: str + :keyword value: Value details of the workspace connection. + :paramtype value: str + :keyword value_format: format for the workspace connection value. Known values are: "JSON". + :paramtype value_format: str or ~azure.mgmt.machinelearningservices.models.ValueFormat + :keyword credentials: + :paramtype credentials: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword + """ + super(UsernamePasswordAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, target=target, value=value, value_format=value_format, **kwargs) + self.auth_type = 'UsernamePassword' # type: str + self.credentials = credentials + + +class VirtualMachineSchema(msrest.serialization.Model): + """VirtualMachineSchema. + + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, + } + + def __init__( + self, + *, + properties: Optional["_models.VirtualMachineSchemaProperties"] = None, + **kwargs + ): + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + """ + super(VirtualMachineSchema, self).__init__(**kwargs) + self.properties = properties -class VirtualMachine(Compute): +class VirtualMachine(Compute, VirtualMachineSchema): """A Machine Learning compute based on Azure Virtual Machines. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. 
Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param compute_location: Location for the underlying compute. - :type compute_location: str + :ivar properties: + :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType + :ivar compute_location: Location for the underlying compute. + :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. + ~azure.mgmt.machinelearningservices.models.ComputeProvisioningState + :ivar description: The description of the Machine Learning compute. + :vartype description: str + :ivar created_on: The time at which the compute was created. :vartype created_on: ~datetime.datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: ~datetime.datetime - :param resource_id: ARM resource id of the underlying compute. - :type resource_id: str + :ivar resource_id: ARM resource id of the underlying compute. + :vartype resource_id: str :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool - :param properties: - :type properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineProperties + :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI + and AAD exclusively for authentication. 
+ :vartype disable_local_auth: bool """ _validation = { 'compute_type': {'required': True}, + 'compute_location': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'created_on': {'readonly': True}, 'modified_on': {'readonly': True}, @@ -3364,6 +22542,7 @@ class VirtualMachine(Compute): } _attribute_map = { + 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'compute_location': {'key': 'computeLocation', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, @@ -3371,44 +22550,103 @@ class VirtualMachine(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, } def __init__( self, *, - compute_location: Optional[str] = None, + properties: Optional["_models.VirtualMachineSchemaProperties"] = None, description: Optional[str] = None, resource_id: Optional[str] = None, - properties: Optional["VirtualMachineProperties"] = None, + disable_local_auth: Optional[bool] = None, **kwargs ): - super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'VirtualMachine' # type: str + """ + :keyword properties: + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties + :keyword description: The description of the Machine Learning compute. + :paramtype description: str + :keyword resource_id: ARM resource id of the underlying compute. + :paramtype resource_id: str + :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only + MSI and AAD exclusively for authentication. + :paramtype disable_local_auth: bool + """ + super(VirtualMachine, self).__init__(description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) self.properties = properties + self.compute_type = 'VirtualMachine' # type: str + self.compute_location = None + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = disable_local_auth + + +class VirtualMachineImage(msrest.serialization.Model): + """Virtual Machine image for Windows AML Compute. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Required. Virtual Machine image path. + :vartype id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: str, + **kwargs + ): + """ + :keyword id: Required. Virtual Machine image path. + :paramtype id: str + """ + super(VirtualMachineImage, self).__init__(**kwargs) + self.id = id -class VirtualMachineProperties(msrest.serialization.Model): - """VirtualMachineProperties. 
+class VirtualMachineSchemaProperties(msrest.serialization.Model): + """VirtualMachineSchemaProperties. - :param virtual_machine_size: Virtual Machine size. - :type virtual_machine_size: str - :param ssh_port: Port open for ssh connections. - :type ssh_port: int - :param address: Public IP address of the virtual machine. - :type address: str - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: + :ivar virtual_machine_size: Virtual Machine size. + :vartype virtual_machine_size: str + :ivar ssh_port: Port open for ssh connections. + :vartype ssh_port: int + :ivar notebook_server_port: Notebook server port open for ssh connections. + :vartype notebook_server_port: int + :ivar address: Public IP address of the virtual machine. + :vartype address: str + :ivar administrator_account: Admin credentials for virtual machine. + :vartype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :ivar is_notebook_instance_compute: Indicates whether this compute will be used for running + notebooks. + :vartype is_notebook_instance_compute: bool """ _attribute_map = { 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'notebook_server_port': {'key': 'notebookServerPort', 'type': 'int'}, 'address': {'key': 'address', 'type': 'str'}, 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, } def __init__( @@ -3416,29 +22654,76 @@ def __init__( *, virtual_machine_size: Optional[str] = None, ssh_port: Optional[int] = None, + notebook_server_port: Optional[int] = None, address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + is_notebook_instance_compute: Optional[bool] = None, **kwargs ): - super(VirtualMachineProperties, self).__init__(**kwargs) + """ + :keyword virtual_machine_size: Virtual Machine size. + :paramtype virtual_machine_size: str + :keyword ssh_port: Port open for ssh connections. + :paramtype ssh_port: int + :keyword notebook_server_port: Notebook server port open for ssh connections. + :paramtype notebook_server_port: int + :keyword address: Public IP address of the virtual machine. + :paramtype address: str + :keyword administrator_account: Admin credentials for virtual machine. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :keyword is_notebook_instance_compute: Indicates whether this compute will be used for running + notebooks. + :paramtype is_notebook_instance_compute: bool + """ + super(VirtualMachineSchemaProperties, self).__init__(**kwargs) self.virtual_machine_size = virtual_machine_size self.ssh_port = ssh_port + self.notebook_server_port = notebook_server_port self.address = address self.administrator_account = administrator_account + self.is_notebook_instance_compute = is_notebook_instance_compute + + +class VirtualMachineSecretsSchema(msrest.serialization.Model): + """VirtualMachineSecretsSchema. + + :ivar administrator_account: Admin credentials for virtual machine. 
+ :vartype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + *, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + **kwargs + ): + """ + :keyword administrator_account: Admin credentials for virtual machine. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super(VirtualMachineSecretsSchema, self).__init__(**kwargs) + self.administrator_account = administrator_account -class VirtualMachineSecrets(ComputeSecrets): +class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): """Secrets related to a Machine Learning compute based on AKS. All required parameters must be populated in order to send to Azure. - :param compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", - "Databricks", "DataLakeAnalytics". - :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :param administrator_account: Admin credentials for virtual machine. - :type administrator_account: + :ivar administrator_account: Admin credentials for virtual machine. + :vartype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :ivar compute_type: Required. The type of compute.Constant filled by server. Known values are: + "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", + "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { @@ -3446,19 +22731,24 @@ class VirtualMachineSecrets(ComputeSecrets): } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, } def __init__( self, *, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, **kwargs ): - super(VirtualMachineSecrets, self).__init__(**kwargs) - self.compute_type = 'VirtualMachine' # type: str + """ + :keyword administrator_account: Admin credentials for virtual machine. + :paramtype administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + super(VirtualMachineSecrets, self).__init__(administrator_account=administrator_account, **kwargs) self.administrator_account = administrator_account + self.compute_type = 'VirtualMachine' # type: str class VirtualMachineSize(msrest.serialization.Model): @@ -3485,11 +22775,11 @@ class VirtualMachineSize(msrest.serialization.Model): :vartype low_priority_capable: bool :ivar premium_io: Specifies if the virtual machine size supports premium IO. :vartype premium_io: bool - :param estimated_vm_prices: The estimated price information for using a VM. - :type estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :param supported_compute_types: Specifies the compute types supported by the virtual machine + :ivar estimated_vm_prices: The estimated price information for using a VM. 
+ :vartype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices + :ivar supported_compute_types: Specifies the compute types supported by the virtual machine size. - :type supported_compute_types: list[str] + :vartype supported_compute_types: list[str] """ _validation = { @@ -3521,10 +22811,17 @@ class VirtualMachineSize(msrest.serialization.Model): def __init__( self, *, - estimated_vm_prices: Optional["EstimatedVMPrices"] = None, + estimated_vm_prices: Optional["_models.EstimatedVMPrices"] = None, supported_compute_types: Optional[List[str]] = None, **kwargs ): + """ + :keyword estimated_vm_prices: The estimated price information for using a VM. + :paramtype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices + :keyword supported_compute_types: Specifies the compute types supported by the virtual machine + size. + :paramtype supported_compute_types: list[str] + """ super(VirtualMachineSize, self).__init__(**kwargs) self.name = None self.family = None @@ -3542,35 +22839,39 @@ def __init__( class VirtualMachineSizeListResult(msrest.serialization.Model): """The List Virtual Machine size operation response. - :param aml_compute: The list of virtual machine sizes supported by AmlCompute. - :type aml_compute: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] + :ivar value: The list of virtual machine sizes supported by AmlCompute. + :vartype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, + 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, } def __init__( self, *, - aml_compute: Optional[List["VirtualMachineSize"]] = None, + value: Optional[List["_models.VirtualMachineSize"]] = None, **kwargs ): + """ + :keyword value: The list of virtual machine sizes supported by AmlCompute. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] + """ super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = aml_compute + self.value = value class VirtualMachineSshCredentials(msrest.serialization.Model): """Admin credentials for virtual machine. - :param username: Username of admin account. - :type username: str - :param password: Password of admin account. - :type password: str - :param public_key_data: Public key data. - :type public_key_data: str - :param private_key_data: Private key data. - :type private_key_data: str + :ivar username: Username of admin account. + :vartype username: str + :ivar password: Password of admin account. + :vartype password: str + :ivar public_key_data: Public key data. + :vartype public_key_data: str + :ivar private_key_data: Private key data. + :vartype private_key_data: str """ _attribute_map = { @@ -3589,6 +22890,16 @@ def __init__( private_key_data: Optional[str] = None, **kwargs ): + """ + :keyword username: Username of admin account. + :paramtype username: str + :keyword password: Password of admin account. + :paramtype password: str + :keyword public_key_data: Public key data. + :paramtype public_key_data: str + :keyword private_key_data: Private key data. + :paramtype private_key_data: str + """ super(VirtualMachineSshCredentials, self).__init__(**kwargs) self.username = username self.password = password @@ -3596,104 +22907,238 @@ def __init__( self.private_key_data = private_key_data +class VolumeDefinition(msrest.serialization.Model): + """VolumeDefinition. + + :ivar type: Type of Volume Definition. 
Possible Values: bind,volume,tmpfs,npipe. Known values + are: "bind", "volume", "tmpfs", "npipe". Default value: "bind". + :vartype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType + :ivar read_only: Indicate whether to mount volume as readOnly. Default value for this is false. + :vartype read_only: bool + :ivar source: Source of the mount. For bind mounts this is the host path. + :vartype source: str + :ivar target: Target of the mount. For bind mounts this is the path in the container. + :vartype target: str + :ivar consistency: Consistency of the volume. + :vartype consistency: str + :ivar bind: Bind Options of the mount. + :vartype bind: ~azure.mgmt.machinelearningservices.models.BindOptions + :ivar volume: Volume Options of the mount. + :vartype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions + :ivar tmpfs: tmpfs option of the mount. + :vartype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'read_only': {'key': 'readOnly', 'type': 'bool'}, + 'source': {'key': 'source', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'consistency': {'key': 'consistency', 'type': 'str'}, + 'bind': {'key': 'bind', 'type': 'BindOptions'}, + 'volume': {'key': 'volume', 'type': 'VolumeOptions'}, + 'tmpfs': {'key': 'tmpfs', 'type': 'TmpfsOptions'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "_models.VolumeDefinitionType"]] = "bind", + read_only: Optional[bool] = None, + source: Optional[str] = None, + target: Optional[str] = None, + consistency: Optional[str] = None, + bind: Optional["_models.BindOptions"] = None, + volume: Optional["_models.VolumeOptions"] = None, + tmpfs: Optional["_models.TmpfsOptions"] = None, + **kwargs + ): + """ + :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known + values are: "bind", "volume", "tmpfs", "npipe". Default value: "bind". + :paramtype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType + :keyword read_only: Indicate whether to mount volume as readOnly. Default value for this is + false. + :paramtype read_only: bool + :keyword source: Source of the mount. For bind mounts this is the host path. + :paramtype source: str + :keyword target: Target of the mount. For bind mounts this is the path in the container. + :paramtype target: str + :keyword consistency: Consistency of the volume. + :paramtype consistency: str + :keyword bind: Bind Options of the mount. + :paramtype bind: ~azure.mgmt.machinelearningservices.models.BindOptions + :keyword volume: Volume Options of the mount. + :paramtype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions + :keyword tmpfs: tmpfs option of the mount. + :paramtype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions + """ + super(VolumeDefinition, self).__init__(**kwargs) + self.type = type + self.read_only = read_only + self.source = source + self.target = target + self.consistency = consistency + self.bind = bind + self.volume = volume + self.tmpfs = tmpfs + + +class VolumeOptions(msrest.serialization.Model): + """VolumeOptions. + + :ivar nocopy: Indicate whether volume is nocopy. + :vartype nocopy: bool + """ + + _attribute_map = { + 'nocopy': {'key': 'nocopy', 'type': 'bool'}, + } + + def __init__( + self, + *, + nocopy: Optional[bool] = None, + **kwargs + ): + """ + :keyword nocopy: Indicate whether volume is nocopy. 
+ :paramtype nocopy: bool + """ + super(VolumeOptions, self).__init__(**kwargs) + self.nocopy = nocopy + + class Workspace(Resource): """An object that represents a machine learning workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: Specifies the name of the resource. + :ivar name: The name of the resource. :vartype name: str - :param identity: The identity of the resource. - :type identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :param tags: A set of tags. Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar location: Specifies the location of the resource. + :vartype location: str + :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku :ivar workspace_id: The immutable id associated with this workspace. :vartype workspace_id: str - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. This name in mutable. - :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format. - :vartype creation_time: ~datetime.datetime - :param key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :type key_vault: str - :param application_insights: ARM id of the application insights associated with this workspace. - This cannot be changed once the workspace has been created. - :type application_insights: str - :param container_registry: ARM id of the container registry associated with this workspace. - This cannot be changed once the workspace has been created. - :type container_registry: str - :param storage_account: ARM id of the storage account associated with this workspace. This + :ivar description: The description of this workspace. + :vartype description: str + :ivar friendly_name: The friendly name for this workspace. This name in mutable. + :vartype friendly_name: str + :ivar key_vault: ARM id of the key vault associated with this workspace. This cannot be changed + once the workspace has been created. + :vartype key_vault: str + :ivar application_insights: ARM id of the application insights associated with this workspace. + :vartype application_insights: str + :ivar container_registry: ARM id of the container registry associated with this workspace. 
+ :vartype container_registry: str + :ivar storage_account: ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created. - :type storage_account: str - :param discovery_url: Url for the discovery service to identify regional endpoints for machine + :vartype storage_account: str + :ivar discovery_url: Url for the discovery service to identify regional endpoints for machine learning experimentation services. - :type discovery_url: str + :vartype discovery_url: str :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + provisioningState is to indicate states for resource provisioning. Known values are: "Unknown", + "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled", "SoftDeleted". :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param encryption: The encryption settings of Azure ML workspace. - :type encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + ~azure.mgmt.machinelearningservices.models.WorkspaceProvisioningState + :ivar encryption: The encryption settings of Azure ML workspace. + :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :ivar hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service. - :type hbi_workspace: bool + :vartype hbi_workspace: bool :ivar service_provisioned_resource_group: The name of the managed resource group created by workspace RP in customer subscription if the workspace is CMK workspace. :vartype service_provisioned_resource_group: str :ivar private_link_count: Count of private connections in the workspace. :vartype private_link_count: int - :param image_build_compute: The compute name for image build. - :type image_build_compute: str - :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :type allow_public_access_when_behind_vnet: bool + :ivar image_build_compute: The compute name for image build. + :vartype image_build_compute: str + :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access + when behind VNet. + :vartype allow_public_access_when_behind_vnet: bool + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled", "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. :vartype private_endpoint_connections: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - :param shared_private_link_resources: The list of shared private link resources in this + :ivar shared_private_link_resources: The list of shared private link resources in this workspace. - :type shared_private_link_resources: + :vartype shared_private_link_resources: list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] :ivar notebook_info: The notebook info of Azure ML workspace. 
:vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo + :ivar service_managed_resources_settings: The service managed resource settings. + :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :vartype primary_user_assigned_identity: str + :ivar tenant_id: The tenant id associated with this workspace. + :vartype tenant_id: str + :ivar storage_hns_enabled: If the storage associated with the workspace has hierarchical + namespace(HNS) enabled. + :vartype storage_hns_enabled: bool + :ivar ml_flow_tracking_uri: The URI associated with this workspace that machine learning flow + must point at to set up tracking. + :vartype ml_flow_tracking_uri: str + :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by + the v2 API. + :vartype v1_legacy_mode: bool + :ivar soft_deleted_at: The timestamp when the workspace was soft deleted. + :vartype soft_deleted_at: str + :ivar scheduled_purge_date: The timestamp when the soft deleted workspace is going to be + purged. + :vartype scheduled_purge_date: str """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, + 'system_data': {'readonly': True}, 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'service_provisioned_resource_group': {'readonly': True}, 'private_link_count': {'readonly': True}, 'private_endpoint_connections': {'readonly': True}, 'notebook_info': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'storage_hns_enabled': {'readonly': True}, + 'ml_flow_tracking_uri': {'readonly': True}, + 'soft_deleted_at': {'readonly': True}, + 'scheduled_purge_date': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, @@ -3706,18 +23151,27 @@ class Workspace(Resource): 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': 
'[SharedPrivateLinkResource]'}, 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'}, + 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'}, + 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, + 'soft_deleted_at': {'key': 'properties.softDeletedAt', 'type': 'str'}, + 'scheduled_purge_date': {'key': 'properties.scheduledPurgeDate', 'type': 'str'}, } def __init__( self, *, - identity: Optional["Identity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, description: Optional[str] = None, friendly_name: Optional[str] = None, key_vault: Optional[str] = None, @@ -3725,18 +23179,80 @@ def __init__( container_registry: Optional[str] = None, storage_account: Optional[str] = None, discovery_url: Optional[str] = None, - encryption: Optional["EncryptionProperty"] = None, + encryption: Optional["_models.EncryptionProperty"] = None, hbi_workspace: Optional[bool] = False, image_build_compute: Optional[str] = None, allow_public_access_when_behind_vnet: Optional[bool] = False, - shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + shared_private_link_resources: Optional[List["_models.SharedPrivateLinkResource"]] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + primary_user_assigned_identity: Optional[str] = None, + v1_legacy_mode: Optional[bool] = False, **kwargs ): - super(Workspace, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs) + """ + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword location: Specifies the location of the resource. + :paramtype location: str + :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword description: The description of this workspace. + :paramtype description: str + :keyword friendly_name: The friendly name for this workspace. This name is mutable. + :paramtype friendly_name: str + :keyword key_vault: ARM id of the key vault associated with this workspace. This cannot be + changed once the workspace has been created. + :paramtype key_vault: str + :keyword application_insights: ARM id of the application insights associated with this + workspace. + :paramtype application_insights: str + :keyword container_registry: ARM id of the container registry associated with this workspace. + :paramtype container_registry: str + :keyword storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created.
+ :paramtype storage_account: str + :keyword discovery_url: Url for the discovery service to identify regional endpoints for + machine learning experimentation services. + :paramtype discovery_url: str + :keyword encryption: The encryption settings of Azure ML workspace. + :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :keyword hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + collected by the service. + :paramtype hbi_workspace: bool + :keyword image_build_compute: The compute name for image build. + :paramtype image_build_compute: str + :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public + access when behind VNet. + :paramtype allow_public_access_when_behind_vnet: bool + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled", "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :keyword shared_private_link_resources: The list of shared private link resources in this + workspace. + :paramtype shared_private_link_resources: + list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] + :keyword service_managed_resources_settings: The service managed resource settings. + :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :paramtype primary_user_assigned_identity: str + :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided + by the v2 API. + :paramtype v1_legacy_mode: bool + """ + super(Workspace, self).__init__(**kwargs) + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku self.workspace_id = None self.description = description self.friendly_name = friendly_name - self.creation_time = None self.key_vault = key_vault self.application_insights = application_insights self.container_registry = container_registry @@ -3749,197 +23265,366 @@ def __init__( self.private_link_count = None self.image_build_compute = image_build_compute self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet + self.public_network_access = public_network_access self.private_endpoint_connections = None self.shared_private_link_resources = shared_private_link_resources self.notebook_info = None + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity + self.tenant_id = None + self.storage_hns_enabled = None + self.ml_flow_tracking_uri = None + self.v1_legacy_mode = v1_legacy_mode + self.soft_deleted_at = None + self.scheduled_purge_date = None + + +class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): + """WorkspaceConnectionManagedIdentity. 
+ + :ivar resource_id: + :vartype resource_id: str + :ivar client_id: + :vartype client_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: Optional[str] = None, + client_id: Optional[str] = None, + **kwargs + ): + """ + :keyword resource_id: + :paramtype resource_id: str + :keyword client_id: + :paramtype client_id: str + """ + super(WorkspaceConnectionManagedIdentity, self).__init__(**kwargs) + self.resource_id = resource_id + self.client_id = client_id + + +class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): + """WorkspaceConnectionPersonalAccessToken. + + :ivar pat: + :vartype pat: str + """ + + _attribute_map = { + 'pat': {'key': 'pat', 'type': 'str'}, + } + + def __init__( + self, + *, + pat: Optional[str] = None, + **kwargs + ): + """ + :keyword pat: + :paramtype pat: str + """ + super(WorkspaceConnectionPersonalAccessToken, self).__init__(**kwargs) + self.pat = pat -class WorkspaceConnection(msrest.serialization.Model): - """Workspace connection. +class WorkspaceConnectionPropertiesV2BasicResource(Resource): + """WorkspaceConnectionPropertiesV2BasicResource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: ResourceId of the workspace connection. + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str - :ivar name: Friendly name of the workspace connection. + :ivar name: The name of the resource. :vartype name: str - :ivar type: Resource type of workspace connection. + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". :vartype type: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. - :type value: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData + :ivar properties: Required. 
+ :vartype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, } def __init__( self, *, - category: Optional[str] = None, - target: Optional[str] = None, - auth_type: Optional[str] = None, - value: Optional[str] = None, + properties: "_models.WorkspaceConnectionPropertiesV2", **kwargs ): - super(WorkspaceConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.category = category - self.target = target - self.auth_type = auth_type - self.value = value + """ + :keyword properties: Required. + :paramtype properties: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 + """ + super(WorkspaceConnectionPropertiesV2BasicResource, self).__init__(**kwargs) + self.properties = properties + +class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.serialization.Model): + """WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult. -class WorkspaceConnectionDto(msrest.serialization.Model): - """object used for creating workspace connection. + Variables are only populated by the server, and will be ignored when sending a request. - :param name: Friendly name of the workspace connection. - :type name: str - :param category: Category of the workspace connection. - :type category: str - :param target: Target of the workspace connection. - :type target: str - :param auth_type: Authorization type of the workspace connection. - :type auth_type: str - :param value: Value details of the workspace connection. 
- :type value: str + :ivar value: + :vartype value: + list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :ivar next_link: + :vartype next_link: str """ + _validation = { + 'next_link': {'readonly': True}, + } + _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'category': {'key': 'properties.category', 'type': 'str'}, - 'target': {'key': 'properties.target', 'type': 'str'}, - 'auth_type': {'key': 'properties.authType', 'type': 'str'}, - 'value': {'key': 'properties.value', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[WorkspaceConnectionPropertiesV2BasicResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, - name: Optional[str] = None, - category: Optional[str] = None, - target: Optional[str] = None, - auth_type: Optional[str] = None, - value: Optional[str] = None, + value: Optional[List["_models.WorkspaceConnectionPropertiesV2BasicResource"]] = None, **kwargs ): - super(WorkspaceConnectionDto, self).__init__(**kwargs) - self.name = name - self.category = category - self.target = target - self.auth_type = auth_type + """ + :keyword value: + :paramtype value: + list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + """ + super(WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult, self).__init__(**kwargs) self.value = value + self.next_link = None -class WorkspaceListResult(msrest.serialization.Model): - """The result of a request to list machine learning workspaces. +class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): + """WorkspaceConnectionSharedAccessSignature. - :param value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :type value: list[~azure.mgmt.machinelearningservices.models.Workspace] - :param next_link: The URI that can be used to request the next list of machine learning - workspaces. - :type next_link: str + :ivar sas: + :vartype sas: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'sas': {'key': 'sas', 'type': 'str'}, } def __init__( self, *, - value: Optional[List["Workspace"]] = None, - next_link: Optional[str] = None, + sas: Optional[str] = None, **kwargs ): - super(WorkspaceListResult, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - + """ + :keyword sas: + :paramtype sas: str + """ + super(WorkspaceConnectionSharedAccessSignature, self).__init__(**kwargs) + self.sas = sas -class WorkspaceSku(msrest.serialization.Model): - """AML workspace sku information. - Variables are only populated by the server, and will be ignored when sending a request. +class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): + """WorkspaceConnectionUsernamePassword. - :ivar resource_type: - :vartype resource_type: str - :ivar skus: The list of workspace sku settings. 
- :vartype skus: list[~azure.mgmt.machinelearningservices.models.SkuSettings] + :ivar username: + :vartype username: str + :ivar password: + :vartype password: str """ - _validation = { - 'resource_type': {'readonly': True}, - 'skus': {'readonly': True}, + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, } + def __init__( + self, + *, + username: Optional[str] = None, + password: Optional[str] = None, + **kwargs + ): + """ + :keyword username: + :paramtype username: str + :keyword password: + :paramtype password: str + """ + super(WorkspaceConnectionUsernamePassword, self).__init__(**kwargs) + self.username = username + self.password = password + + +class WorkspaceListResult(msrest.serialization.Model): + """The result of a request to list machine learning workspaces. + + :ivar value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Workspace] + :ivar next_link: The URI that can be used to request the next list of machine learning + workspaces. + :vartype next_link: str + """ + _attribute_map = { - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, + 'value': {'key': 'value', 'type': '[Workspace]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, + *, + value: Optional[List["_models.Workspace"]] = None, + next_link: Optional[str] = None, **kwargs ): - super(WorkspaceSku, self).__init__(**kwargs) - self.resource_type = None - self.skus = None + """ + :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] + :keyword next_link: The URI that can be used to request the next list of machine learning + workspaces. + :paramtype next_link: str + """ + super(WorkspaceListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link class WorkspaceUpdateParameters(msrest.serialization.Model): """The parameters for updating a machine learning workspace. - :param tags: A set of tags. The resource tags for the machine learning workspace. - :type tags: dict[str, str] - :param sku: The sku of the workspace. - :type sku: ~azure.mgmt.machinelearningservices.models.Sku - :param description: The description of this workspace. - :type description: str - :param friendly_name: The friendly name for this workspace. - :type friendly_name: str + :ivar tags: A set of tags. The resource tags for the machine learning workspace. + :vartype tags: dict[str, str] + :ivar sku: The sku of the workspace. + :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku + :ivar identity: The identity of the resource. + :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :ivar description: The description of this workspace. + :vartype description: str + :ivar friendly_name: The friendly name for this workspace. + :vartype friendly_name: str + :ivar image_build_compute: The compute name for image build. + :vartype image_build_compute: str + :ivar service_managed_resources_settings: The service managed resource settings. 
+ :vartype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :ivar primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :vartype primary_user_assigned_identity: str + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled", "Disabled". + :vartype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :ivar application_insights: ARM id of the application insights associated with this workspace. + :vartype application_insights: str + :ivar container_registry: ARM id of the container registry associated with this workspace. + :vartype container_registry: str + :ivar encryption: The encryption settings of the workspace. + :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties """ _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, + 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionUpdateProperties'}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, description: Optional[str] = None, friendly_name: Optional[str] = None, + image_build_compute: Optional[str] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + primary_user_assigned_identity: Optional[str] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, + encryption: Optional["_models.EncryptionUpdateProperties"] = None, **kwargs ): + """ + :keyword tags: A set of tags. The resource tags for the machine learning workspace. + :paramtype tags: dict[str, str] + :keyword sku: The sku of the workspace. + :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity + :keyword description: The description of this workspace. + :paramtype description: str + :keyword friendly_name: The friendly name for this workspace. + :paramtype friendly_name: str + :keyword image_build_compute: The compute name for image build. + :paramtype image_build_compute: str + :keyword service_managed_resources_settings: The service managed resource settings. 
+ :paramtype service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :keyword primary_user_assigned_identity: The user assigned identity resource id that represents + the workspace identity. + :paramtype primary_user_assigned_identity: str + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled", "Disabled". + :paramtype public_network_access: str or + ~azure.mgmt.machinelearningservices.models.PublicNetworkAccess + :keyword application_insights: ARM id of the application insights associated with this + workspace. + :paramtype application_insights: str + :keyword container_registry: ARM id of the container registry associated with this workspace. + :paramtype container_registry: str + :keyword encryption: The encryption settings of the workspace. + :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties + """ super(WorkspaceUpdateParameters, self).__init__(**kwargs) self.tags = tags self.sku = sku + self.identity = identity self.description = description self.friendly_name = friendly_name + self.image_build_compute = image_build_compute + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity + self.public_network_access = public_network_access + self.application_insights = application_insights + self.container_registry = container_registry + self.encryption = encryption diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py new file mode 100644 index 0000000000000..0ad201a8c586e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py index 516999b100d82..a02156aa11a5b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/__init__.py @@ -8,28 +8,65 @@ from ._operations import Operations from ._workspaces_operations import WorkspacesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._notebooks_operations import NotebooksOperations from ._usages_operations import UsagesOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._quotas_operations import QuotasOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations -from ._machine_learning_compute_operations import MachineLearningComputeOperations -from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin +from ._compute_operations import ComputeOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations +from ._batch_deployments_operations import BatchDeploymentsOperations +from ._code_containers_operations import CodeContainersOperations +from ._code_versions_operations import CodeVersionsOperations +from ._component_containers_operations import ComponentContainersOperations +from ._component_versions_operations import ComponentVersionsOperations +from ._data_containers_operations import DataContainersOperations +from ._data_versions_operations import DataVersionsOperations +from ._datastores_operations import DatastoresOperations +from ._environment_containers_operations import EnvironmentContainersOperations +from ._environment_versions_operations import EnvironmentVersionsOperations +from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations +from ._model_containers_operations import ModelContainersOperations +from ._model_versions_operations import ModelVersionsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations +from ._online_deployments_operations import OnlineDeploymentsOperations +from ._schedules_operations import SchedulesOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ 'Operations', 'WorkspacesOperations', - 'WorkspaceFeaturesOperations', - 'NotebooksOperations', 'UsagesOperations', 'VirtualMachineSizesOperations', 'QuotasOperations', - 'WorkspaceConnectionsOperations', - 'MachineLearningComputeOperations', - 'AzureMachineLearningWorkspacesOperationsMixin', + 'ComputeOperations', 'PrivateEndpointConnectionsOperations', 
'PrivateLinkResourcesOperations', + 'WorkspaceConnectionsOperations', + 'BatchEndpointsOperations', + 'BatchDeploymentsOperations', + 'CodeContainersOperations', + 'CodeVersionsOperations', + 'ComponentContainersOperations', + 'ComponentVersionsOperations', + 'DataContainersOperations', + 'DataVersionsOperations', + 'DatastoresOperations', + 'EnvironmentContainersOperations', + 'EnvironmentVersionsOperations', + 'JobsOperations', + 'LabelingJobsOperations', + 'ModelContainersOperations', + 'ModelVersionsOperations', + 'OnlineEndpointsOperations', + 'OnlineDeploymentsOperations', + 'SchedulesOperations', + 'WorkspaceFeaturesOperations', ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py deleted file mode 100644 index b39b1e769d9ae..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.mgmt.core.exceptions import ARMErrorFormat - -from .. import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class AzureMachineLearningWorkspacesOperationsMixin(object): - - def list_skus( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.SkuListResult"] - """Lists all skus with associated features. 
- - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuListResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuListResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('SkuListResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py new file mode 100644 index 0000000000000..6920aea966909 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py @@ -0,0 +1,930 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + json: Optional[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + json: Optional[_models.BatchDeployment] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class BatchDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`batch_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.BatchDeploymentTrackedResourceArmPaginatedResult]: + """Lists Batch inference deployments in the workspace. + + Lists Batch inference deployments in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult + or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = 
self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete 
Batch Inference deployment (asynchronous). + + Delete Batch Inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. + :type endpoint_name: str + :param deployment_name: Inference deployment identifier. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> _models.BatchDeployment: + """Gets a batch inference deployment by id. + + Gets a batch inference deployment by id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Endpoint name. 
+ :type endpoint_name: str + :param deployment_name: The identifier for the Batch deployments. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchDeployment, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + **kwargs: Any + ) -> Optional[_models.BatchDeployment]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.BatchDeployment]] + + _json = self._serialize.body(body, 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + 
params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. + :type deployment_name: str + :param body: Batch inference deployment definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
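For orientation, a minimal client-side sketch of driving this long-running PATCH from user code (editorial commentary, not part of the generated patch). The ``batch_deployments`` attribute name on the client and the ``tags`` field on the partial-update model are assumptions read off this diff; authentication uses azure-identity's DefaultAzureCredential.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces, models

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)
# Partial-update body; only tags are set here, and that field name is an assumption.
partial_body = models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(
    tags={"stage": "test"},
)
poller = client.batch_deployments.begin_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
    body=partial_body,
)
updated = poller.result()  # blocks until the LRO reaches a terminal state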
+ :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + **kwargs: Any + ) -> _models.BatchDeployment: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + + _json = self._serialize.body(body, 'BatchDeployment') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + 
template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('BatchDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. + :type deployment_name: str + :param body: Batch inference deployment definition object. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
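The ``continuation_token`` keyword documented above lets a poller be rebuilt later, for example from another process. A sketch under the same assumptions as the previous one (``client`` constructed there; the required fields of the ``BatchDeployment`` body are defined elsewhere in this SDK and are not reproduced here):

# `client` is the AzureMachineLearningWorkspaces instance from the earlier sketch,
# and `deployment` is a fully populated models.BatchDeployment.
poller = client.batch_deployments.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
    body=deployment,
)
token = poller.continuation_token()  # persist this string to resume polling later

resumed = client.batch_deployments.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
    body=deployment,
    continuation_token=token,  # skips the initial request and resumes the saved poller
)
result = resumed.result()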
+ :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py new file mode 100644 index 0000000000000..015adb824d399 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py @@ -0,0 +1,992 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + 
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + json: Optional[_models.PartialMinimalTrackedResourceWithIdentity] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + 
+ # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + json: Optional[_models.BatchEndpoint] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + 
_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class BatchEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`batch_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.BatchEndpointTrackedResourceArmPaginatedResult]: + """Lists Batch inference endpoint in the workspace. + + Lists Batch inference endpoint in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + count=count, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + count=count, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def 
extract_data(pipeline_response): + deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete Batch Inference Endpoint (asynchronous). + + Delete Batch Inference Endpoint (asynchronous). 
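A minimal call-site sketch (editorial, not part of the generated patch; ``client`` as constructed in the earlier sketch, and ``batch_endpoints`` is the attribute named in this class's docstring):

client.batch_endpoints.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
).wait()  # block until the delete LRO completes; pass polling=False to skip polling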
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.BatchEndpoint: + """Gets a batch inference endpoint by name. + + Gets a batch inference endpoint by name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch Endpoint. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchEndpoint, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> Optional[_models.BatchEndpoint]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.BatchEndpoint]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + 
response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
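Because the PATCH body is the partial ``PartialMinimalTrackedResourceWithIdentity`` model rather than a full ``BatchEndpoint``, a tags-only update can be expressed compactly. A sketch under the same assumptions as the earlier ones (``client`` and ``models`` imported there; the ``tags`` field on the partial model is an assumption):

patch = models.PartialMinimalTrackedResourceWithIdentity(
    tags={"owner": "ml-team"},  # assumed field on the partial tracked-resource model
)
endpoint = client.batch_endpoints.begin_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    body=patch,
).result()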
+ :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + **kwargs: Any + ) -> _models.BatchEndpoint: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + + _json = self._serialize.body(body, 'BatchEndpoint') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) 
# type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.BatchEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('BatchEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def list_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthKeys: + """Lists batch Inference Endpoint keys. + + Lists batch Inference Endpoint keys. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthKeys] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py new file mode 100644 index 0000000000000..a539e5812556d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py @@ -0,0 +1,528 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, 
**path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.CodeContainer] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + 
**kwargs + ) + +class CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`code_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.CodeContainerResourceArmPaginatedResult]: + """List containers. + + List containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.CodeContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeContainer] + + _json = self._serialize.body(body, 'CodeContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('CodeContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py new file mode 100644 index 0000000000000..1287707b9d573 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py @@ -0,0 +1,569 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
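[Editor's note: the following usage sketch is not part of the generated diff. It illustrates how the CodeContainersOperations surface added above might be called; the subscription ID, resource group, workspace, and container names are placeholders, and azure-identity is assumed to be installed alongside this package.]

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# list() returns an ItemPaged iterator; the continuation (next_link) is followed
# transparently, so a plain for-loop walks every code container in the workspace.
for container in client.code_containers.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
):
    print(container.name)

# get() fetches a single container by its case-sensitive name.
container = client.code_containers.get(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-code",
)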
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + *, + json: Optional[_models.CodeVersion] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', 
pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`code_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.CodeVersionResourceArmPaginatedResult]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. 
This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.CodeVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.CodeVersion] + + _json = self._serialize.body(body, 'CodeVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('CodeVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py new file mode 100644 index 0000000000000..812ddb61e49a0 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_containers_operations.py @@ -0,0 +1,537 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
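[Editor's note: a similar hedged sketch for the CodeVersionsOperations methods above; not part of the generated diff, and the angle-bracketed names are placeholders.]

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# order_by/top/skip are forwarded as the $orderBy/$top/$skip query parameters.
for version in client.code_versions.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-code",  # parent container name, case-sensitive
    top=10,
):
    print(version.name)

# Fetch or delete a specific version by its case-sensitive identifier.
version = client.code_versions.get(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-code",
    version="1",
)
client.code_versions.delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-code",
    version="1",
)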
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.ComponentContainer] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", 
content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`component_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.ComponentContainerResourceArmPaginatedResult]: + """List component containers. + + List component containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + + @distributed_trace + def get( + self, + 
resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.ComponentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentContainer] + + _json = self._serialize.body(body, 'ComponentContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ComponentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py new file mode 100644 index 0000000000000..56d79231691d2 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_component_versions_operations.py @@ -0,0 +1,578 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
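[Editor's note: a sketch for the ComponentContainersOperations methods above; not part of the generated diff. The "All" list-view value is an assumption about the ListViewType enum in this API version, and the other names are placeholders.]

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# list_view_type is passed through as the listViewType query parameter; the
# string "All" is assumed here to include archived containers as well.
for component in client.component_containers.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    list_view_type="All",
):
    print(component.name)

# Delete a container by name; the operation returns None on a 200/204 response.
client.component_containers.delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-component",
)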
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + *, + json: Optional[_models.ComponentVersion] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', 
min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`component_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.ComponentVersionResourceArmPaginatedResult]: + """List component versions. + + List component versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Component name. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.ComponentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComponentVersion] + + _json = self._serialize.body(body, 'ComponentVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ComponentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py new file mode 100644 index 0000000000000..ba79aa8533c4f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py @@ -0,0 +1,1843 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": 
_SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + *, + json: Optional[_models.ComputeResource] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + *, + json: Optional[_models.ClusterUpdateParameters] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": 
_SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + *, + underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params['underlyingResourceAction'] = _SERIALIZER.query("underlying_resource_action", underlying_resource_action, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_custom_services_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + *, + json: Optional[List[_models.CustomService]] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", 
compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_nodes_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_start_request_initial( + 
subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_stop_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_restart_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_idle_shutdown_setting_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + compute_name: str, + *, + json: Optional[_models.IdleShutdownSetting] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class ComputeOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`compute` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.PaginatedComputeResourcesList]: + """Gets computes in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedComputeResourcesList or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PaginatedComputeResourcesList] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + 
get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> _models.ComputeResource: + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # 
type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + _json = self._serialize.body(parameters, 'ComputeResource') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if response.status_code == 201: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + **kwargs: Any + ) -> _models.ComputeResource: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + + _json = self._serialize.body(parameters, 'ClusterUpdateParameters') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: Additional parameters for cluster update. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeResource] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + underlying_resource_action=underlying_resource_action, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + 
stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + **kwargs: Any + ) -> LROPoller[None]: + """Deletes specified Machine Learning compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. + :type underlying_resource_action: str or + ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + + @distributed_trace + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param custom_services: New list of Custom Services. 
+ :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(custom_services, '[CustomService]') + + request = build_update_custom_services_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update_custom_services.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_custom_services.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore + + + @distributed_trace + def list_nodes( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> Iterable[_models.AmlComputeNodesInformation]: + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AmlComputeNodesInformation or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.AmlComputeNodesInformation] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_nodes_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.list_nodes.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_nodes_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) + list_of_elem = deserialized.nodes + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list_nodes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + + @distributed_trace + def list_keys( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> _models.ComputeSecrets: + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ComputeSecrets] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore + + + def _start_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_start_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._start_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + 
_start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + + @distributed_trace + def begin_start( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Posts a start action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._start_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + + def _stop_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + 
error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_stop_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._stop_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + + @distributed_trace + def begin_stop( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Posts a stop action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._stop_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + + def _restart_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_restart_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + template_url=self._restart_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _restart_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + + @distributed_trace + def begin_restart( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Posts a restart action to a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._restart_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_restart.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + + @distributed_trace + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(parameters, 'IdleShutdownSetting') + + request = build_update_idle_shutdown_setting_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update_idle_shutdown_setting.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + update_idle_shutdown_setting.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py new file mode 100644 index 0000000000000..b2908ed3c5172 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py @@ -0,0 +1,537 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.DataContainer] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + 
_headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`data_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.DataContainerResourceArmPaginatedResult]: + """List data containers. + + List data containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + + @distributed_trace + def get( + self, + 
resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.DataContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataContainer] + + _json = self._serialize.body(body, 'DataContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py new file mode 100644 index 0000000000000..6ee8b6cc98696 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py @@ -0,0 +1,591 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if tags is not None: + _params['$tags'] = _SERIALIZER.query("tags", tags, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = 
kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + *, + json: Optional[_models.DataVersionBase] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 
'str', min_length=1),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
+        "version": _SERIALIZER.url("version", version, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
+
+    # Construct headers
+    if content_type is not None:
+        _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PUT",
+        url=_url,
+        params=_params,
+        headers=_headers,
+        json=json,
+        content=content,
+        **kwargs
+    )
+
+class DataVersionsOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+        Instead, you should access the following operations through
+        :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s
+        :attr:`data_versions` attribute.
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs):
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+
+    @distributed_trace
+    def list(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        name: str,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        skip: Optional[str] = None,
+        tags: Optional[str] = None,
+        list_view_type: Optional[Union[str, "_models.ListViewType"]] = None,
+        **kwargs: Any
+    ) -> Iterable[_models.DataVersionBaseResourceArmPaginatedResult]:
+        """List data versions in the data container.
+
+        List data versions in the data container.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param name: Data container's name.
+        :type name: str
+        :param order_by: Property to order results by; choose from ['createdtime', 'modifiedtime'].
+         Default value is None.
+        :type order_by: str
+        :param top: Maximum number of results to return; it cannot be greater than the page size.
+         If top exceeds the page size, results will be returned with the default page size count.
+         Default value is None.
+        :type top: int
+        :param skip: Continuation token for pagination. Default value is None.
+        :type skip: str
+        :param tags: Comma-separated list of tag names (and optionally values). Example:
+         tag1,tag2=value2. Default value is None.
+        :type tags: str
+        :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly,
+         ListViewType.All] View type for including/excluding (for example) archived entities. Default
+         value is None.
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + tags=tags, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.DataVersionBase: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBase] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DataVersionBase] + + _json = self._serialize.body(body, 'DataVersionBase') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataVersionBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py new file mode 100644 index 0000000000000..cf3fe86c8d405 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py @@ -0,0 +1,692 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + count: Optional[int] = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: Optional[bool] = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + if is_default is not None: + _params['isDefault'] = _SERIALIZER.query("is_default", is_default, 'bool') + if names is not None: + _params['names'] = _SERIALIZER.query("names", names, '[str]', div=',') + if search_text is not None: + _params['searchText'] = _SERIALIZER.query("search_text", search_text, 'str') + if order_by is not None: + _params['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if order_by_asc is not None: + _params['orderByAsc'] = _SERIALIZER.query("order_by_asc", order_by_asc, 'bool') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) 
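+    # The caller-supplied 'api_version' keyword (or an 'api-version' entry already present in
+    # the request parameters) takes precedence over the 2022-06-01-preview default applied below.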
+ + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.Datastore] = None, + content: Any = None, + skip_validation: Optional[bool] = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": 
_SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip_validation is not None: + _params['skipValidation'] = _SERIALIZER.query("skip_validation", skip_validation, 'bool') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_secrets_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class DatastoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`datastores` attribute. 
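+
+    A minimal usage sketch (assuming an authenticated Azure credential; the subscription ID and
+    resource names below are placeholders)::
+
+        from azure.identity import DefaultAzureCredential
+        from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces
+
+        client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
+        datastore = client.datastores.get(
+            resource_group_name="<resource-group>",
+            workspace_name="<workspace>",
+            name="<datastore-name>",
+        )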
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: Optional[bool] = False, + **kwargs: Any + ) -> Iterable[_models.DatastoreResourceArmPaginatedResult]: + """List datastores. + + List datastores. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is 30. + :type count: int + :param is_default: Filter down to the workspace default datastore. Default value is None. + :type is_default: bool + :param names: Names of datastores to return. Default value is None. + :type names: list[str] + :param search_text: Text to search for in the datastore names. Default value is None. + :type search_text: str + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. + :type order_by: str + :param order_by_asc: Order by property in ascending order. Default value is False. 
+ :type order_by_asc: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DatastoreResourceArmPaginatedResult or the result + of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DatastoreResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DatastoreResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + is_default=is_default, + names=names, + search_text=search_text, + order_by=order_by, + order_by_asc=order_by_asc, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete datastore. + + Delete datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param name: Datastore name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.Datastore: + """Get datastore. + + Get datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Datastore] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Datastore', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: Optional[bool] = False, + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. + :type name: str + :param body: Datastore entity to create or update. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. 
+ :type skip_validation: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Datastore] + + _json = self._serialize.body(body, 'Datastore') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + skip_validation=skip_validation, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('Datastore', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Datastore', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + + + @distributed_trace + def list_secrets( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.DatastoreSecrets: + """Get datastore secrets. + + Get datastore secrets. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Datastore name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DatastoreSecrets, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.DatastoreSecrets] + + + request = build_list_secrets_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.list_secrets.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DatastoreSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py new file mode 100644 index 0000000000000..bec98ec57ac0c --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py @@ -0,0 +1,537 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', 
max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.EnvironmentContainer] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", 
content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class EnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`environment_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.EnvironmentContainerResourceArmPaginatedResult]: + """List environment containers. + + List environment containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + + 
@distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentContainer] + + _json = self._serialize.body(body, 'EnvironmentContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py new file mode 100644 index 0000000000000..38bcb5bd31a5d --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_versions_operations.py @@ -0,0 +1,578 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + *, + json: Optional[_models.EnvironmentVersion] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 
'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`environment_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.EnvironmentVersionResourceArmPaginatedResult]: + """List versions. + + List versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. + :type name: str + :param version: Version of EnvironmentVersion. + :type version: str + :param body: Definition of EnvironmentVersion. 
+ :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.EnvironmentVersion] + + _json = self._serialize.body(body, 'EnvironmentVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py new file mode 100644 index 0000000000000..78d877690fcd7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py @@ -0,0 +1,813 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if job_type is not None: + _params['jobType'] = _SERIALIZER.query("job_type", job_type, 'str') + if tag is not None: + _params['tag'] = _SERIALIZER.query("tag", tag, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + if scheduled is not None: + _params['scheduled'] = _SERIALIZER.query("scheduled", scheduled, 'bool') + if schedule_id is not None: + _params['scheduleId'] = _SERIALIZER.query("schedule_id", schedule_id, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + *, + json: Optional[_models.JobBase] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_cancel_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class JobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.JobBaseResourceArmPaginatedResult]: + """Lists Jobs in the workspace. + + Lists Jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param job_type: Type of job to be returned. Default value is None. + :type job_type: str + :param tag: Jobs returned will have this tag key. Default value is None. + :type tag: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :param scheduled: Indicator whether the job is scheduled job. Default value is None. + :type scheduled: bool + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. + :type schedule_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.JobBaseResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBaseResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + job_type=job_type, + tag=tag, + list_view_type=list_view_type, + scheduled=scheduled, + schedule_id=schedule_id, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> LROPoller[None]: + """Deletes a Job (asynchronous). + + Deletes a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> _models.JobBase: + """Gets a Job by name/id. + + Gets a Job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBase] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.JobBase, + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :param body: Job definition object. 
+ :type body: ~azure.mgmt.machinelearningservices.models.JobBase + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.JobBase] + + _json = self._serialize.body(body, 'JobBase') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('JobBase', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('JobBase', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + + + def _cancel_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_cancel_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._cancel_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + 
**kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _cancel_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore + + + @distributed_trace + def begin_cancel( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> LROPoller[None]: + """Cancels a Job (asynchronous). + + Cancels a Job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._cancel_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py new file mode 100644 index 0000000000000..08062800beeba --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py @@ -0,0 +1,1107 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + count: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + *, + include_job_instructions: Optional[bool] = False, + include_label_categories: Optional[bool] = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if include_job_instructions is not None: + _params['includeJobInstructions'] = _SERIALIZER.query("include_job_instructions", include_job_instructions, 'bool') + if include_label_categories is not None: + _params['includeLabelCategories'] = _SERIALIZER.query("include_label_categories", include_label_categories, 'bool') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + *, + json: Optional[_models.LabelingJob] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, 
min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_export_labels_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + *, + json: Optional[_models.ExportSummary] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_pause_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = 
_format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_resume_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "id": _SERIALIZER.url("id", id, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`labeling_jobs` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + **kwargs: Any + ) -> Iterable[_models.LabelingJobResourceArmPaginatedResult]: + """Lists labeling jobs in the workspace. + + Lists labeling jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Number of labeling jobs to return. Default value is None. 
+ :type count: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJobResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJobResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + """Delete a labeling job. + + Delete a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + id: str, + include_job_instructions: Optional[bool] = False, + include_label_categories: Optional[bool] = False, + **kwargs: Any + ) -> _models.LabelingJob: + """Gets a labeling job by name/id. + + Gets a labeling job by name/id. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param include_job_instructions: Boolean value to indicate whether to include JobInstructions + in response. Default value is False. + :type include_job_instructions: bool + :param include_label_categories: Boolean value to indicate Whether to include LabelCategories + in response. Default value is False. 
+ :type include_label_categories: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LabelingJob, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + include_job_instructions=include_job_instructions, + include_label_categories=include_label_categories, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + **kwargs: Any + ) -> _models.LabelingJob: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + + _json = self._serialize.body(body, 'LabelingJob') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = 
pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('LabelingJob', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param body: LabelingJob definition object. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.LabelingJob] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LabelingJob', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + + def _export_labels_initial( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + **kwargs: Any + ) -> Optional[_models.ExportSummary]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.ExportSummary]] + + _json = self._serialize.body(body, 'ExportSummary') + + request = build_export_labels_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._export_labels_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ExportSummary', pipeline_response) + + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _export_labels_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + + @distributed_trace + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). + + Export labels from a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :param body: The export summary. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExportSummary] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._export_labels_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ExportSummary', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_export_labels.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + + @distributed_trace + def pause( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + """Pause a labeling job. + + Pause a labeling job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_pause_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self.pause.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + pause.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore + + + def _resume_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_resume_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + template_url=self._resume_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + 
_resume_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + + + @distributed_trace + def begin_resume( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + id: str, + **kwargs: Any + ) -> LROPoller[None]: + """Resume a labeling job (asynchronous). + + Resume a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. + :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._resume_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + id=id, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_resume.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py 
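For orientation, a minimal usage sketch of the labeling-jobs operation group defined above (hand-written, not part of the generated diff). It assumes ``DefaultAzureCredential`` from azure-identity is available and that the subscription, resource group, workspace, and job names are placeholders.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Page through labeling jobs; skip and count map to the $skip and count query parameters.
for job in client.labeling_jobs.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    count=10,
):
    print(job.name)

# Fetch a single job, including its instructions and label categories in the response.
job = client.labeling_jobs.get(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    id="<labeling-job-name>",
    include_job_instructions=True,
    include_label_categories=True,
)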
b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py deleted file mode 100644 index 10261d10df596..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py +++ /dev/null @@ -1,912 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class MachineLearningComputeOperations(object): - """MachineLearningComputeOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list_by_workspace( - self, - resource_group_name, # type: str - workspace_name, # type: str - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PaginatedComputeResourcesList"] - """Gets computes in specified workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param skiptoken: Continuation token for pagination. 
- :type skiptoken: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore - - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeResource" - """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are - not returned - use 'keys' nested resource to get them. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
- :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ComputeResource') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 201]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, response_headers) - - return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_create_or_update( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] - """Creates or updates compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify - that it does not exist yet. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._create_or_update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_update( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] - """Updates properties of a compute. This call will overwrite a compute if it exists. This is a - nonrecoverable operation. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._update_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('ComputeResource', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - response_headers = {} - if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - - if cls: - return cls(pipeline_response, None, response_headers) - - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] - """Deletes specified Machine Learning compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. - :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
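# A short sketch of begin_delete's `underlying_resource_action` switch, against the
# same pre-regeneration client as in the earlier sketch: "Detach" removes only the
# workspace link, while "Delete" also removes the underlying Azure resource.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
client.machine_learning_compute.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    compute_name="<compute>",
    underlying_resource_action="Detach",   # or "Delete" to remove the backing resource too
).wait()                                   # block until the LRO completes; no body is returned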
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - underlying_resource_action=underlying_resource_action, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore - - def list_nodes( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.AmlComputeNodesInformation" - """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
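# A hedged sketch of list_nodes, assuming the 2020-08-01 AmlComputeNodesInformation
# model exposes a `nodes` collection whose items carry node_id / public_ip_address /
# port; placeholders as before.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
node_info = client.machine_learning_compute.list_nodes(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    compute_name="<aml-compute>",
)
for node in node_info.nodes or []:
    print(node.node_id, node.public_ip_address, node.port)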
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: AmlComputeNodesInformation, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_nodes.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore - - def list_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeSecrets" - """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
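# list_keys returns a polymorphic ComputeSecrets model (AKS, AmlCompute, VirtualMachine
# secrets and so on); a minimal sketch that only inspects the discriminator, since the
# concrete secret fields depend on the compute type.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
secrets = client.machine_learning_compute.list_keys(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    compute_name="<compute>",
)
print(type(secrets).__name__, secrets.compute_type)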
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = self._deserialize('ComputeSecrets', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore - - def start( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a start action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.start.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore - - def stop( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a stop action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.stop.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore - - def restart( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Posts a restart action to a compute instance. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. 
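# start, stop and restart are plain POST actions that return None on success; a brief
# sketch cycling a compute instance through them (these actions target ComputeInstance
# computes), using the same placeholder client setup as the earlier sketches.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
args = dict(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    compute_name="<compute-instance>",
)
client.machine_learning_compute.stop(**args)
client.machine_learning_compute.start(**args)
client.machine_learning_compute.restart(**args)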
- :type compute_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.restart.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py new file mode 100644 index 0000000000000..5e8689ddfbbfc --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py @@ -0,0 +1,545 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', 
min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.ModelContainer] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if 
content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`model_containers` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.ModelContainerResourceArmPaginatedResult]: + """List model containers. + + List model containers. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param count: Maximum number of results to return. Default value is None. + :type count: int + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. 
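# A usage sketch of the paged list operation described above, against the regenerated
# client, which exposes this operations group as `model_containers`; angle-bracket
# values are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
for container in client.model_containers.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    count=10,                      # maximum number of results to return
    list_view_type="ActiveOnly",   # or "ArchivedOnly" / "All"
):
    print(container.name)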
+ :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainerResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + count=count, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + """Delete container. + + Delete container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.ModelContainer: + """Get container. + + Get container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainer] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param body: Container entity to create or update. 
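# A round-trip sketch of the container CRUD operations documented above.
# `ModelContainerProperties` and its description/tags fields are assumed from the
# 2022-06-01-preview models; verify against the generated models module.
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces
from azure.mgmt.machinelearningservices import models

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
created = client.model_containers.create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-model",
    body=models.ModelContainer(
        properties=models.ModelContainerProperties(
            description="Example model container",
            tags={"stage": "dev"},
        )
    ),
)
fetched = client.model_containers.get(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-model",
)
client.model_containers.delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="my-model",
)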
+ :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelContainer] + + _json = self._serialize.body(body, 'ModelContainer') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ModelContainer', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py new file mode 100644 index 0000000000000..c057085a51111 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py @@ -0,0 +1,628 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if version is not None: + _params['version'] = _SERIALIZER.query("version", version, 'str') + if description is not None: + _params['description'] = _SERIALIZER.query("description", description, 'str') + if offset is not None: + _params['offset'] = _SERIALIZER.query("offset", offset, 'int') + if tags is not None: + _params['tags'] = _SERIALIZER.query("tags", tags, 'str') + if properties is not None: + _params['properties'] = _SERIALIZER.query("properties", properties, 'str') + if feed is not None: + _params['feed'] = _SERIALIZER.query("feed", feed, 'str') + if list_view_type is not None: + _params['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + + # Construct headers + 
_headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + *, + json: Optional[_models.ModelVersion] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "version": _SERIALIZER.url("version", version, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`model_versions` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + **kwargs: Any + ) -> Iterable[_models.ModelVersionResourceArmPaginatedResult]: + """List model versions. + + List model versions. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Model name. This is case-sensitive. + :type name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Maximum number of records to return. Default value is None. + :type top: int + :param version: Model version. Default value is None. + :type version: str + :param description: Model description. 
Default value is None. + :type description: str + :param offset: Number of initial results to skip. Default value is None. + :type offset: int + :param tags: Comma-separated list of tag names (and optionally values). Example: + tag1,tag2=value2. Default value is None. + :type tags: str + :param properties: Comma-separated list of property names (and optionally values). Example: + prop1,prop2=value2. Default value is None. + :type properties: str + :param feed: Name of the feed. Default value is None. + :type feed: str + :param list_view_type: View type for including/excluding (for example) archived entities. + Default value is None. + :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the + result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersionResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + skip=skip, + order_by=order_by, + top=top, + version=version, + description=description, + offset=offset, + tags=tags, + properties=properties, + feed=feed, + list_view_type=list_view_type, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> None: + """Delete version. + + Delete version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + **kwargs: Any + ) -> _models.ModelVersion: + """Get version. + + Get version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. 
+ :type version: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersion] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + + + @distributed_trace + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Container name. This is case-sensitive. + :type name: str + :param version: Version identifier. This is case-sensitive. + :type version: str + :param body: Version entity to create or update. 
+ :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.ModelVersion] + + _json = self._serialize.body(body, 'ModelVersion') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + version=version, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('ModelVersion', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py deleted file mode 100644 index 35f06ab915b04..0000000000000 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_notebooks_operations.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
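Editor's note: before the removed _notebooks_operations.py listing continues, here is a minimal usage sketch for the ModelVersionsOperations group added above. It is illustrative only and not part of the generated diff; it assumes azure-identity is installed for DefaultAzureCredential, and the subscription, resource group, workspace, and model names are hypothetical placeholders.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

# Hypothetical identifiers -- replace with real values.
client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# Page through the registered versions of a model; list() returns an ItemPaged
# that lazily follows next_link, matching the paging code generated above.
for model_version in client.model_versions.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-model",
):
    print(model_version.name)

# Fetch a single version; get() deserializes the response into a ModelVersion model.
one_version = client.model_versions.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-model",
    version="1",
)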
-# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error -from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod -from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling - -from .. import models as _models - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class NotebooksOperations(object): - """NotebooksOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ - - models = _models - - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def _prepare_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.NotebookResourceInfo"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._prepare_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200, 202]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized - _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore - - def begin_prepare( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.NotebookResourceInfo"] - """prepare. - - :param resource_group_name: Name of the resource group in which workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises ~azure.core.exceptions.HttpResponseError: - """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._prepare_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - cls=lambda x,y,z: x, - **kwargs - ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py new file mode 100644 index 0000000000000..baeb08c0f3939 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py @@ -0,0 +1,1220 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if order_by is not None: + _params['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + if top is not None: + _params['$top'] = _SERIALIZER.query("top", top, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + json: Optional[_models.PartialMinimalTrackedResourceWithSku] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + json: Optional[_models.OnlineDeployment] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_get_logs_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + json: 
Optional[_models.DeploymentLogsRequest] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_skus_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return 
HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`online_deployments` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.OnlineDeploymentTrackedResourceArmPaginatedResult]: + """List Inference Endpoint Deployments. + + List Inference Endpoint Deployments. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param order_by: Ordering of list. Default value is None. + :type order_by: str + :param top: Top of list. Default value is None. + :type top: int + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult + or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + order_by=order_by, + top=top, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + 
self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete Inference Endpoint Deployment (asynchronous). + + Delete Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + **kwargs: Any + ) -> _models.OnlineDeployment: + """Get Inference Endpoint Deployment. + + Get Inference Endpoint Deployment. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineDeployment, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + **kwargs: Any + ) -> Optional[_models.OnlineDeployment]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.OnlineDeployment]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSku') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + 
api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + **kwargs: Any + ) -> _models.OnlineDeployment: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + + _json = self._serialize.body(body, 'OnlineDeployment') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + 
template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineDeployment] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineDeployment', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + + @distributed_trace + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.DeploymentLogsRequest, + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DeploymentLogs] + + _json = self._serialize.body(body, 'DeploymentLogsRequest') + + request = build_get_logs_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.get_logs.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DeploymentLogs', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_logs.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore + + + @distributed_trace + def list_skus( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.SkuResourceArmPaginatedResult]: + """List Inference Endpoint Deployment Skus. + + List Inference Endpoint Deployment Skus. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param count: Number of Skus to be retrieved in a page of results. Default value is None. + :type count: int + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuResourceArmPaginatedResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.SkuResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_skus_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + count=count, + skip=skip, + template_url=self.list_skus.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_skus_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + deployment_name=deployment_name, + api_version=api_version, + count=count, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list_skus.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py new file mode 100644 index 0000000000000..3bb6263798279 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py 
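Before the new online endpoints operations module below, a minimal usage sketch tying together the deployment operations added above and the endpoint operations defined in that module. This is illustrative only: the `online_deployments` attribute name, the client constructor wiring, the `DeploymentLogsRequest` field, and the pre-built `deployment_body` entity are assumptions not taken from this change (only the `online_endpoints` attribute is documented explicitly here), and placeholder strings must be replaced with real values.

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces
    from azure.mgmt.machinelearningservices.models import DeploymentLogsRequest

    # Assumed constructor wiring; adjust to your environment.
    client = AzureMachineLearningWorkspaces(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",
    )

    rg, ws, ep, dep = "<resource-group>", "<workspace>", "<endpoint>", "<deployment>"

    # begin_* methods are long-running operations: they return an LROPoller and
    # .result() blocks until the service finishes provisioning.
    poller = client.online_deployments.begin_create_or_update(  # attribute name assumed
        resource_group_name=rg,
        workspace_name=ws,
        endpoint_name=ep,
        deployment_name=dep,
        body=deployment_body,  # assumed: an OnlineDeployment entity built elsewhere
    )
    deployment = poller.result()

    # Synchronous call returning a DeploymentLogs model.
    logs = client.online_deployments.get_logs(
        rg, ws, ep, dep,
        body=DeploymentLogsRequest(tail=100),  # field name assumed
    )

    # Paged results come back as ItemPaged iterators.
    for sku in client.online_deployments.list_skus(rg, ws, ep, dep):
        print(sku)

    # Endpoint-level operations from the new module below.
    endpoint = client.online_endpoints.get(rg, ws, ep)
    keys = client.online_endpoints.list_keys(rg, ws, ep)        # key-based auth
    token = client.online_endpoints.get_token(rg, ws, ep)       # AMLToken-based auth
    client.online_endpoints.begin_delete(rg, ws, ep).result()   # LRO; returns None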
@@ -0,0 +1,1339 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, "_models.EndpointComputeType"]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, "_models.OrderString"]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if name is not None: + _params['name'] = _SERIALIZER.query("name", name, 'str') + if count is not None: + _params['count'] = _SERIALIZER.query("count", count, 'int') + if compute_type is not None: + _params['computeType'] = _SERIALIZER.query("compute_type", compute_type, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + if tags is not None: + _params['tags'] = _SERIALIZER.query("tags", tags, 'str') + if properties is not None: + _params['properties'] = _SERIALIZER.query("properties", 
properties, 'str') + if order_by is not None: + _params['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + json: Optional[_models.PartialMinimalTrackedResourceWithIdentity] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + json: Optional[_models.OnlineEndpoint] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + 
endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_regenerate_keys_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + *, + json: Optional[_models.RegenerateEndpointKeysRequest] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_get_token_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = 
kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`online_endpoints` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, "_models.EndpointComputeType"]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, "_models.OrderString"]] = None, + **kwargs: Any + ) -> Iterable[_models.OnlineEndpointTrackedResourceArmPaginatedResult]: + """List Online Endpoints. + + List Online Endpoints. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Name of the endpoint. Default value is None. + :type name: str + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. + :type count: int + :param compute_type: EndpointComputeType to be filtered by. Default value is None. + :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :param tags: A set of tags with which to filter the returned models. It is a comma separated + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. + :type tags: str + :param properties: A set of properties with which to filter the returned models. It is a comma + separated string of properties key and/or properties key=value Example: + propKey1,propKey2,propKey3=value3 . Default value is None. 
+ :type properties: str + :param order_by: The option to order the response. Default value is None. + :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or + the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + name=name, + count=count, + compute_type=compute_type, + skip=skip, + tags=tags, + properties=properties, + order_by=order_by, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete Online Endpoint (asynchronous). + + Delete Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.OnlineEndpoint: + """Get Online Endpoint. + + Get Online Endpoint. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OnlineEndpoint, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> Optional[_models.OnlineEndpoint]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.OnlineEndpoint]] + + _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + 
response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + **kwargs: Any + ) -> _models.OnlineEndpoint: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + + _json = self._serialize.body(body, 'OnlineEndpoint') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.OnlineEndpoint] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + + @distributed_trace + def list_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthKeys: + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthKeys, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthKeys] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore + + + def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + + _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + + request = build_regenerate_keys_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._regenerate_keys_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + 
stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + + @distributed_trace + def begin_regenerate_keys( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param body: RegenerateKeys request . + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._regenerate_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + + @distributed_trace + def get_token( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + **kwargs: Any + ) -> _models.EndpointAuthToken: + """Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + Retrieve a valid AAD token for an Endpoint using AMLToken-based authentication. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. 
+ :type endpoint_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EndpointAuthToken, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.EndpointAuthToken] + + + request = build_get_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + endpoint_name=endpoint_name, + api_version=api_version, + template_url=self.get_token.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('EndpointAuthToken', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py index ee7327b7cc203..195d4aed47d15 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,87 +6,124 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._vendor import _convert_request +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.MachineLearningServices/operations") -class Operations(object): - """Operations operations. + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`operations` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["_models.OperationListResult"] + **kwargs: Any + ) -> Iterable[_models.AmlOperationListResult]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OperationListResult] + :return: An iterator like instance of either AmlOperationListResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.AmlOperationListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("AmlOperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -94,17 +132,22 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore + list.metadata = {'url': "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py new file mode 100644 index 0000000000000..0ad201a8c586e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_patch.py @@ -0,0 +1,19 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py index 33fa5932b5525..442feeb7bc797 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,58 +6,310 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse -from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat -from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + resource_group_name: str, + workspace_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections") # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + *, + json: Optional[_models.PrivateEndpointConnection] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
-if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`private_endpoint_connections` attribute. + """ - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + models = _models -class PrivateEndpointConnectionsOperations(object): - """PrivateEndpointConnectionsOperations operations. + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - """ + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> Iterable[_models.PrivateEndpointConnectionListResult]: + """List all the private endpoint connections associated with the workspace. - models = _models + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result + of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnectionListResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def get_next(next_link=None): + request = prepare_request(next_link) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + + @distributed_trace def get( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Gets the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. 
+ :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -68,39 +321,41 @@ def get( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnection] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) @@ -109,20 +364,22 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - def put( + get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + + @distributed_trace + def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - properties, # type: "_models.PrivateEndpointConnection" - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + properties: _models.PrivateEndpointConnection, + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Update the state of specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -136,44 +393,45 @@ def put( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.put.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(properties, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateEndpointConnection] + + _json = self._serialize.body(properties, 
'PrivateEndpointConnection') + + request = build_create_or_update_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create_or_update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) @@ -182,67 +440,21 @@ def put( return cls(pipeline_response, deserialized, {}) return deserialized - put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - if response.status_code not in [200, 202, 204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, 
response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - - if cls: - return cls(pipeline_response, None, {}) - - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore - - def begin_delete( + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> None: """Deletes the specified private endpoint connection associated with the workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -250,55 +462,49 @@ def begin_delete( with the workspace. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
- :return: An instance of LROPoller that returns either None or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval + + + request = build_delete_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] - if cont_token is None: - raw_result = self._delete_initial( - resource_group_name=resource_group_name, - workspace_name=workspace_name, - private_endpoint_connection_name=private_endpoint_connection_name, - cls=lambda x,y,z: x, - **kwargs - ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - def get_long_running_output(pipeline_response): - if cls: - return cls(pipeline_response, None, {}) + if cls: + return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - if cont_token: - return LROPoller.from_continuation_token( - polling_method=polling_method, - continuation_token=cont_token, - client=self._client, - deserialization_callback=get_long_running_output - ) - else: - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py index 89f73bf100c5c..0aa967bb5dac7 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,55 +6,92 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PrivateLinkResourcesOperations(object): - """PrivateLinkResourcesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`private_link_resources` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + - def list_by_workspace( + @distributed_trace + def list( self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "_models.PrivateLinkResourceListResult" + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.PrivateLinkResourceListResult: """Gets the private link resources that need to be created for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -62,38 +100,41 @@ def list_by_workspace( :rtype: ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.PrivateLinkResourceListResult] + + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) @@ -101,4 +142,6 @@ def list_by_workspace( return cls(pipeline_response, deserialized, {}) return deserialized - list_by_workspace.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py index 2e50f583b164e..72d9a9ce7fcfe 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,53 +6,131 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_update_request( + location: str, + subscription_id: str, + *, + json: Optional[_models.QuotaUpdateParameters] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas") # pylint: disable=line-too-long + path_format_arguments = { + "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class QuotasOperations(object): - """QuotasOperations operations. +def build_list_request( + subscription_id: str, + location: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class QuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`quotas` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def update( self, - location, # type: str - parameters, # type: "_models.QuotaUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.UpdateWorkspaceQuotasResult" + location: str, + parameters: _models.QuotaUpdateParameters, + **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. :param location: The location for update quota is queried. 
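Usage note (editorial, not part of the generated diff): a minimal sketch of how the regenerated quotas operation group is reached through the service client. The credential type, subscription ID, and location below are illustrative assumptions, not values taken from this change.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

# Placeholder subscription ID and location; substitute real values.
client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# QuotasOperations.list returns an ItemPaged iterator over the currently
# assigned workspace quotas for the given location.
for quota in client.quotas.list(location="westus2"):
    print(quota)

The other operation groups touched in this diff (for example private_endpoint_connections, private_link_resources, and the online endpoint token/key operations) are reached the same way, through the corresponding attributes on the same client instance.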
@@ -63,42 +142,43 @@ def update( :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'QuotaUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.UpdateWorkspaceQuotasResult] + + _json = self._serialize.body(parameters, 'QuotaUpdateParameters') + + request = build_update_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.update.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) @@ -107,57 +187,67 @@ def update( return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore + update.metadata = {'url': 
"/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore + + + @distributed_trace def list( self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListWorkspaceQuotas"] + location: str, + **kwargs: Any + ) -> Iterable[_models.ListWorkspaceQuotas]: """Gets the currently assigned Workspace Quotas based on VMFamily. :param location: The location for which resource usage is queried. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListWorkspaceQuotas] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -166,16 +256,22 @@ def extract_data(pipeline_response): def 
get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py new file mode 100644 index 0000000000000..495a0426a9d4f --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_schedules_operations.py @@ -0,0 +1,671 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', 
"2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + name: str, + *, + json: Optional[_models.Schedule] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + +class SchedulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`schedules` attribute. 
+ """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + + @distributed_trace + def list( + self, + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.ScheduleResourceArmPaginatedResult]: + """List schedules in specified workspace. + + List schedules in specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ScheduleResourceArmPaginatedResult or the result + of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ScheduleResourceArmPaginatedResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ScheduleResourceArmPaginatedResult] + + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> LROPoller[None]: + """Delete schedule. + + Delete schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Schedule name. + :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + @distributed_trace + def get( + self, + resource_group_name: str, + workspace_name: str, + name: str, + **kwargs: Any + ) -> _models.Schedule: + """Get schedule. + + Get schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Schedule name. 
+ :type name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Schedule, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Schedule + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Schedule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + **kwargs: Any + ) -> _models.Schedule: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + + _json = self._serialize.body(body, 'Schedule') + + request = build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('Schedule', pipeline_response) + + if response.status_code == 201: + response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + + deserialized = self._deserialize('Schedule', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Schedule name. + :type name: str + :param body: Schedule definition. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Schedule] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + name=name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Schedule', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py index a300468995d3f..63bb185bfaded 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,52 +6,87 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
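The schedules module added above follows the generated long-running-operation pattern: _delete_initial and _create_or_update_initial send the first request, and the begin_* methods wrap them in an LROPoller driven by ARMPolling. A minimal sketch of calling these operations through the client follows; the resource group, workspace, and schedule names are placeholders, and the Schedule body required by begin_create_or_update is omitted because its fields are not part of this diff.

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

# GET .../workspaces/{workspaceName}/schedules: paged, $skip carries the continuation token.
for schedule in client.schedules.list(
    resource_group_name="my-rg",            # placeholder
    workspace_name="my-aml-workspace",      # placeholder
):
    print(schedule.name)

# GET a single schedule by name.
schedule = client.schedules.get(
    resource_group_name="my-rg",
    workspace_name="my-aml-workspace",
    name="nightly-retrain",                 # placeholder schedule name
)

# DELETE is long-running: begin_delete returns LROPoller[None] backed by ARMPolling;
# result() blocks until polling reports that the operation finished.
client.schedules.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-aml-workspace",
    name="nightly-retrain",
).result()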
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + location: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class UsagesOperations(object): - """UsagesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`usages` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListUsagesResult"] + location: str, + **kwargs: Any + ) -> Iterable[_models.ListUsagesResult]: """Gets the current usage information as well as limits for AML resources for given subscription and location. @@ -58,43 +94,51 @@ def list( :type location: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either ListUsagesResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListUsagesResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + location=location, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url 
= self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('ListUsagesResult', pipeline_response) + deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -103,16 +147,22 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py index 44557ed772545..87265dcf34447 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,101 +6,129 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + location: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes") # pylint: disable=line-too-long + path_format_arguments = { + "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class VirtualMachineSizesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class VirtualMachineSizesOperations(object): - """VirtualMachineSizesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`virtual_machine_sizes` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, - location, # type: str - compute_type=None, # type: Optional[str] - recommended=None, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> "_models.VirtualMachineSizeListResult" + location: str, + **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. 
:param location: The location upon which virtual-machine-sizes is queried. :type location: str - :param compute_type: Type of compute to filter by. - :type compute_type: str - :param recommended: Specifies whether to return recommended vm sizes or all vm sizes. - :type recommended: bool :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualMachineSizeListResult, or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if compute_type is not None: - query_parameters['compute-type'] = self._serialize.query("compute_type", compute_type, 'str') - if recommended is not None: - query_parameters['recommended'] = self._serialize.query("recommended", recommended, 'bool') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.VirtualMachineSizeListResult] + + + request = build_list_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) @@ -107,4 +136,6 @@ def list( return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore + + list.metadata 
= {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: ignore + diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py index 71e2c3c11b36d..c682061245c51 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,325 +6,520 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_create_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + connection_name: str, + *, + json: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + 
params=_params, + headers=_headers, + **kwargs + ) + + +def build_delete_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + if target is not None: + _params['target'] = _SERIALIZER.query("target", target, 'str') + if category is not None: + _params['category'] = _SERIALIZER.query("category", category, 'str') + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
-if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class WorkspaceConnectionsOperations(object): - """WorkspaceConnectionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( + + @distributed_trace + def create( self, - resource_group_name, # type: str - workspace_name, # type: str - target=None, # type: Optional[str] - category=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PaginatedWorkspaceConnectionsList"] - """List all connections under a AML workspace. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + connection_name: str, + parameters: _models.WorkspaceConnectionPropertiesV2BasicResource, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """create. + + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param target: Target of the workspace connection. - :type target: str - :param category: Category of the workspace connection. - :type category: str + :param connection_name: Friendly name of the workspace connection. + :type connection_name: str + :param parameters: The object for creating or updating a new workspace connection. 
+ :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedWorkspaceConnectionsList] + :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedWorkspaceConnectionsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + _json = self._serialize.body(parameters, 'WorkspaceConnectionPropertiesV2BasicResource') + + request = build_create_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self.create.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if target is not None: - query_parameters['target'] = self._serialize.query("target", target, 'str') - if category is not None: - query_parameters['category'] = self._serialize.query("category", category, 'str') - - request = self._client.get(url, query_parameters, header_parameters) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) - return request + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - def extract_data(pipeline_response): - deserialized = 
self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return None, iter(list_of_elem) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - def get_next(next_link=None): - request = prepare_request(next_link) + deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + if cls: + return cls(pipeline_response, deserialized, {}) - if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + return deserialized - return pipeline_response + create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore - def create( + @distributed_trace + def get( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - parameters, # type: "_models.WorkspaceConnectionDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnection" - """Add a new workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """get. + + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param connection_name: Friendly name of the workspace connection. :type connection_name: str - :param parameters: The object for creating or updating a new workspace connection. 
- :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionDto :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.create.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - 
deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore - def get( + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + + + @distributed_trace + def delete( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnection" - """Get the detail of a workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs: Any + ) -> None: + """delete. + + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param connection_name: Friendly name of the workspace connection. :type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnection, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + :return: None, or the result of cls(response) + :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnection"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + + request = build_delete_request( + subscription_id=self._config.subscription_id, 
+ resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + api_version=api_version, + template_url=self.delete.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnection', pipeline_response) - if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, None, {}) - return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - def delete( + + @distributed_trace + def list( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - """Delete a workspace connection. - - :param resource_group_name: Name of the resource group in which workspace is located. + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult]: + """list. + + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. - :type connection_name: str + :param target: Target of the workspace connection. Default value is None. + :type target: str + :param category: Category of the workspace connection. Default value is None. 
+ :type category: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None + :return: An iterator like instance of either + WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult or the result of cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[None] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.delete.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) + def prepare_request(next_link=None): + if not next_link: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + target=target, + category=category, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + else: + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + target=target, + category=category, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + def extract_data(pipeline_response): + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + def get_next(next_link=None): + request = prepare_request(next_link) - if response.status_code not in [200, 
204]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response - if cls: - return cls(pipeline_response, None, {}) + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + return pipeline_response + + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py index ae71b091b5f75..296bffe797a7e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,99 +6,146 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class WorkspaceFeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class WorkspaceFeaturesOperations(object): - """WorkspaceFeaturesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`workspace_features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["_models.ListAmlUserFeatureResult"] + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> Iterable[_models.ListAmlUserFeatureResult]: """Lists all enabled features for a workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] + :return: An iterator like instance of either ListAmlUserFeatureResult or the result of + cls(response) + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListAmlUserFeatureResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def 
extract_data(pipeline_response): - deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -106,17 +154,22 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore + list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py index c1aa91f9fc113..12cf7c5caf9fb 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,58 +6,602 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast + +from msrest import Serializer from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._vendor import _convert_request, _format_url_section +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + +def build_get_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_create_or_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + json: Optional[_models.Workspace] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PUT", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_delete_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="DELETE", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_update_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + json: Optional[_models.WorkspaceUpdateParameters] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="PATCH", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_by_resource_group_request( + subscription_id: str, + resource_group_name: str, + *, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces") # pylint: disable=line-too-long + path_format_arguments = { + 
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_diagnose_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + json: Optional[_models.DiagnoseWorkspaceParameters] = None, + content: Any = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', None)) # type: Optional[str] + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + if content_type is not None: + _headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + json=json, + content=content, + **kwargs + ) + + +def build_list_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # 
Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_resync_keys_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_by_subscription_request( + subscription_id: str, + *, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if skip is not None: + _params['$skip'] = _SERIALIZER.query("skip", skip, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_notebook_access_token_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_prepare_notebook_request_initial( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_storage_account_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_notebook_keys_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="POST", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + + +def build_list_outbound_network_dependencies_endpoints_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + accept = _headers.pop('Accept', "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints") # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + + # Construct headers + _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str') + + return HttpRequest( + method="GET", + url=_url, + params=_params, + headers=_headers, + **kwargs + ) + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class WorkspacesOperations(object): - """WorkspacesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningWorkspaces`'s + :attr:`workspaces` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace def get( self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -65,38 +610,40 @@ def get( :rtype: ~azure.mgmt.machinelearningservices.models.Workspace :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] + + + request = build_get_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.get.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # 
type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Workspace', pipeline_response) @@ -105,79 +652,79 @@ def get( return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.Workspace"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + resource_group_name: str, + workspace_name: str, + parameters: _models.Workspace, + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Workspace]] + + _json = self._serialize.body(parameters, 'Workspace') + + request = 
build_create_or_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._create_or_update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response - if response.status_code not in [200, 201, 202]: + if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('Workspace', pipeline_response) - if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + + @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + parameters: _models.Workspace, + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -185,48 +732,57 @@ def begin_create_or_update( :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
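For orientation, a sketch of calling get and the begin_create_or_update long-running operation through the workspaces attribute; all names are placeholders, and the minimal Workspace payload is an assumption for illustration (a real create needs the dependent resources the service requires):

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces
from azure.mgmt.machinelearningservices.models import Workspace

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# begin_create_or_update returns LROPoller[Workspace]; result() blocks until provisioning finishes.
poller = client.workspaces.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    parameters=Workspace(location="eastus"),  # assumed minimal payload for illustration
)
created = poller.result()

# Plain synchronous read of the same resource.
ws = client.workspaces.get("<resource-group>", "<workspace>")
print(ws.name, ws.provisioning_state)  # provisioning_state assumed present on the Workspace model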
:return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = self._create_or_update_initial( + raw_result = self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, + api_version=api_version, + content_type=content_type, cls=lambda x,y,z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('Workspace', pipeline_response) - if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( @@ -235,109 +791,118 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - def _delete_initial( + begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> None: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = build_delete_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._delete_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + - def begin_delete( + @distributed_trace + def begin_delete( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Deletes a machine learning workspace. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. 
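A short sketch of the delete long-running operation (placeholder names; the polling keywords behave as documented in the docstring):

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# begin_delete returns LROPoller[None]; wait() blocks, status() reports the terminal state.
poller = client.workspaces.begin_delete("<resource-group>", "<workspace>")
poller.wait()
print(poller.status())

# polling=False skips automatic polling and leaves the poller for manual control.
manual = client.workspaces.begin_delete("<resource-group>", "<workspace>", polling=False)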
:type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises ~azure.core.exceptions.HttpResponseError: + :raises: ~azure.core.exceptions.HttpResponseError """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, + api_version=api_version, cls=lambda x,y,z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( @@ -346,132 +911,211 @@ def get_long_running_output(pipeline_response): client=self._client, deserialization_callback=get_long_running_output ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: _models.WorkspaceUpdateParameters, + **kwargs: Any + ) -> Optional[_models.Workspace]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.Workspace]] + + _json = self._serialize.body(parameters, 'WorkspaceUpdateParameters') + + request = build_update_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._update_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + - def update( + @distributed_trace + def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.WorkspaceUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" + resource_group_name: str, + workspace_name: str, + parameters: _models.WorkspaceUpdateParameters, + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :param parameters: The parameters for updating a machine learning workspace. :type parameters: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self.update.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.Workspace] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) - 
deserialized = self._deserialize('Workspace', pipeline_response) + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized - if cls: - return cls(pipeline_response, deserialized, {}) - return deserialized - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace def list_by_resource_group( self, - resource_group_name, # type: str - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + resource_group_name: str, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.WorkspaceListResult]: """Lists all the available machine learning workspaces under the specified resource group. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. 
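list_by_resource_group returns an ItemPaged iterator; the pager follows the service's next link automatically, so skip is only needed to resume from a previously returned continuation token. A sketch with placeholder names:

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Flat iteration over Workspace items across all pages.
for ws in client.workspaces.list_by_resource_group("<resource-group>"):
    print(ws.name, ws.location)

# Page-by-page iteration, when the page boundaries themselves matter.
pages = client.workspaces.list_by_resource_group("<resource-group>").by_page()
first_page = list(next(pages))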
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + api_version=api_version, + skip=skip, + template_url=self.list_by_resource_group.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -480,32 +1124,189 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + 
stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + def _diagnose_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + **kwargs: Any + ) -> Optional[_models.DiagnoseResponseResult]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.DiagnoseResponseResult]] + + if parameters is not None: + _json = self._serialize.body(parameters, 'DiagnoseWorkspaceParameters') + else: + _json = None + + request = build_diagnose_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + content_type=content_type, + json=_json, + template_url=self._diagnose_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + + if response.status_code == 202: + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + + _diagnose_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + + @distributed_trace + def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + parameters: Optional[_models.DiagnoseWorkspaceParameters] = None, + 
**kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param parameters: The parameter of diagnosing workspace health. Default value is None. + :type parameters: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + content_type = kwargs.pop('content_type', _headers.pop('Content-Type', "application/json")) # type: Optional[str] + cls = kwargs.pop('cls', None) # type: ClsType[_models.DiagnoseResponseResult] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._diagnose_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_diagnose.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + + @distributed_trace def list_keys( self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: 
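A hedged sketch of the diagnose long-running operation: parameters defaults to None, and a DiagnoseWorkspaceParameters payload can be supplied to narrow the checks (placeholder names):

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Location-based polling (final-state-via: location), as configured in begin_diagnose above.
poller = client.workspaces.begin_diagnose("<resource-group>", "<workspace>")
diagnosis = poller.result()  # DiagnoseResponseResult
print(diagnosis)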
(...) -> "_models.ListWorkspaceKeysResult" + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -514,38 +1315,40 @@ def list_keys( :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.list_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListWorkspaceKeysResult] + + + request = build_list_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) @@ -554,116 +1357,188 @@ def list_keys( return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore - def resync_keys( + list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore + + + def _resync_keys_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + + + request = build_resync_keys_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._resync_keys_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _resync_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + + + @distributed_trace + def begin_resync_keys( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> LROPoller[None]: """Resync all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which workspace is located. + :param resource_group_name: The name of the resource group. The name is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) - :rtype: None + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
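A sketch of the key-management operations: list_keys is a plain POST, while resync_keys is now surfaced as the begin_resync_keys long-running operation defined here (placeholder names; the key attribute shown is assumed):

from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

keys = client.workspaces.list_keys("<resource-group>", "<workspace>")  # ListWorkspaceKeysResult
print(keys.user_storage_key)  # attribute name assumed for illustration

# Key resync became a long-running operation in this generation.
client.workspaces.begin_resync_keys("<resource-group>", "<workspace>").wait()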
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - - # Construct URL - url = self.resync_keys.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[None] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._resync_keys_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) - request = self._client.post(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.MachineLearningServiceError, response) - raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + begin_resync_keys.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + @distributed_trace def list_by_subscription( self, - skiptoken=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable[_models.WorkspaceListResult]: """Lists all the available machine learning workspaces under the specified subscription. - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] + :rtype: + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.WorkspaceListResult] + error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2020-08-01" - accept = "application/json" - + error_map.update(kwargs.pop('error_map', {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + skip=skip, + template_url=self.list_by_subscription.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + skip=skip, + template_url=next_link, + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = 
self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -672,17 +1547,412 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.MachineLearningServiceError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response + return ItemPaged( get_next, extract_data ) - list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + + @distributed_trace + def list_notebook_access_token( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.NotebookAccessTokenResult: + """return notebook access token and refresh token. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: NotebookAccessTokenResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.NotebookAccessTokenResult] + + + request = build_list_notebook_access_token_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_notebook_access_token.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response) + + if cls: + 
return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_access_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore + + + def _prepare_notebook_initial( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> Optional[_models.NotebookResourceInfo]: + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[Optional[_models.NotebookResourceInfo]] + + + request = build_prepare_notebook_request_initial( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self._prepare_notebook_initial.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _prepare_notebook_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + + @distributed_trace + def begin_prepare_notebook( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> LROPoller[_models.NotebookResourceInfo]: + """Prepare a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] + :raises: ~azure.core.exceptions.HttpResponseError + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.NotebookResourceInfo] + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._prepare_notebook_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + cls=lambda x,y,z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop('error_map', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling( + lro_delay, + lro_options={'final-state-via': 'location'}, + + **kwargs + )) # type: PollingMethod + elif polling is False: polling_method = cast(PollingMethod, NoPolling()) + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_prepare_notebook.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + + @distributed_trace + def list_storage_account_keys( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: + """List storage account keys of a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListStorageAccountKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListStorageAccountKeysResult] + + + request = build_list_storage_account_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_storage_account_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_storage_account_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"} # type: ignore + + + @distributed_trace + def list_notebook_keys( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ListNotebookKeysResult: + """List keys of a notebook. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListNotebookKeysResult, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ListNotebookKeysResult] + + + request = build_list_notebook_keys_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_notebook_keys.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_notebook_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"} # type: ignore + + + @distributed_trace + def list_outbound_network_dependencies_endpoints( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> _models.ExternalFQDNResponse: + """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) + programmatically. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExternalFQDNResponse, or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop('api_version', _params.pop('api-version', "2022-06-01-preview")) # type: str + cls = kwargs.pop('cls', None) # type: ClsType[_models.ExternalFQDNResponse] + + + request = build_list_outbound_network_dependencies_endpoints_request( + subscription_id=self._config.subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=api_version, + template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, + stream=False, + **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ExternalFQDNResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list_outbound_network_dependencies_endpoints.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore +
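
For orientation, a minimal usage sketch of the regenerated workspace operations shown in this hunk, assuming the operation group is exposed as client.workspaces (matching the WorkspacesOperations class the client wires up), that azure-identity's DefaultAzureCredential is available, and that the subscription, resource group, and workspace names below are placeholders:

    # Minimal usage sketch; all identifiers below are placeholders.
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.machinelearningservices import AzureMachineLearningWorkspaces

    subscription_id = "00000000-0000-0000-0000-000000000000"
    resource_group_name = "my-resource-group"
    workspace_name = "my-workspace"

    client = AzureMachineLearningWorkspaces(
        credential=DefaultAzureCredential(),
        subscription_id=subscription_id,
    )

    # list_by_subscription now takes `skip` (formerly `skiptoken`) and returns
    # an ItemPaged iterator; continuation links are followed by get_next.
    for workspace in client.workspaces.list_by_subscription():
        print(workspace.name)

    # listNotebookAccessToken is a simple request/response call returning a model.
    token_result = client.workspaces.list_notebook_access_token(
        resource_group_name, workspace_name
    )

    # prepareNotebook is a long-running operation: begin_* returns an LROPoller
    # driven by ARMPolling (final-state-via: location) until completion.
    poller = client.workspaces.begin_prepare_notebook(
        resource_group_name, workspace_name
    )
    notebook_info = poller.result()

    # The remaining new list operations follow the same request/response
    # pattern as list_notebook_access_token.
    storage_keys = client.workspaces.list_storage_account_keys(
        resource_group_name, workspace_name
    )
    notebook_keys = client.workspaces.list_notebook_keys(
        resource_group_name, workspace_name
    )
    fqdns = client.workspaces.list_outbound_network_dependencies_endpoints(
        resource_group_name, workspace_name
    )

From the caller's side, the visible change in this hunk is the keyword rename from skiptoken to skip and the new operations; URL, query, and header construction move into the build_*_request helpers, so the per-method bodies only convert and dispatch the prepared request.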