diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md b/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md new file mode 100644 index 000000000000..332564950c28 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md @@ -0,0 +1,3 @@ +# Release History + +## 1.0.0b1 (Unreleased) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in b/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in new file mode 100644 index 000000000000..ded513877297 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in @@ -0,0 +1,5 @@ +recursive-include tests *.py +recursive-include samples *.py *.md +include *.md +include azure/__init__.py +include azure/ai/__init__.py diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/README.md b/sdk/documenttranslation/azure-ai-documenttranslation/README.md new file mode 100644 index 000000000000..92c7ca73c592 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/README.md @@ -0,0 +1,35 @@ +[![Build Status](https://dev.azure.com/azure-sdk/public/_apis/build/status/azure-sdk-for-python.client?branchName=master)](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=46?branchName=master) + +# Azure Template Package client library for Python + +This template package matches necessary patterns that the development team has established to create a unified sdk functional from Python 2.7 onwards. The packages contained herein can be installed singly or as part of the `azure` namespace. Any other introductory text should go here. + +This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8 + +For a more complete set of Azure libraries, see https://aka.ms/azsdk/python/all + +# Getting started + +For a rich example of a well formatted readme, please check [here.](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-TEMPLATE.md) In addition, this is an [example readme](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-EXAMPLE.md) that should be emulated. Note that the top-level sections in this template align with that of the [template.](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-TEMPLATE.md) + +# Key concepts + +Bullet point list of your library's main concepts. + +# Examples + +Examples of some of the key concepts for your library. + +# Troubleshooting + +Running into issues? This section should contain details as to what to do there. + +# Next steps + +More sample code should go here, along with links out to the appropriate example tests. + +# Contributing + +If you encounter any bugs or have suggestions, please file an issue in the [Issues]() section of the project. 
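As a quickstart sketch for the package this diff introduces: the environment variable names below are illustrative assumptions, while `DocumentTranslationClient` and the `AzureKeyCredential`/`TokenCredential` support come from the client code added later in this diff.

```python
# Minimal sketch: install the preview package and construct the client.
# Assumed install command:  pip install azure-ai-documenttranslation
import os

from azure.core.credentials import AzureKeyCredential
from azure.ai.documenttranslation import DocumentTranslationClient

# Environment variable names are illustrative, not defined by this package.
endpoint = os.environ["DOCUMENT_TRANSLATION_ENDPOINT"]
key = os.environ["DOCUMENT_TRANSLATION_KEY"]

client = DocumentTranslationClient(endpoint, AzureKeyCredential(key))
```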
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fsdk%2Ftemplate%2Fazure-template%2FREADME.png) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py new file mode 100644 index 000000000000..5960c353a898 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py new file mode 100644 index 000000000000..5960c353a898 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py new file mode 100644 index 000000000000..6aa71f6e8164 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py @@ -0,0 +1,37 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from ._version import VERSION +from ._client import DocumentTranslationClient +from ._generated.models import ( + StorageInputType, +) +from ._api_version import DocumentTranslationVersion +from ._models import ( + StorageTarget, + JobStatusDetail, + DocumentStatusDetail, + DocumentTranslationError, + TranslationGlossary, + BatchDocumentInput, + FileFormat +) + +__VERSION__ = VERSION + + +__all__ = [ + "DocumentTranslationClient", + "DocumentTranslationVersion", + "BatchDocumentInput", + "TranslationGlossary", + "StorageInputType", + "FileFormat", + "StorageTarget", + "JobStatusDetail", + "DocumentStatusDetail", + "DocumentTranslationError", +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py new file mode 100644 index 000000000000..0f162daff872 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py @@ -0,0 +1,28 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from enum import Enum + + +class DocumentTranslationVersion(str, Enum): + """Document Translation API versions supported by this package""" + + #: This is the default version + V1_0_PREVIEW = "1.0-preview.1" + + +def validate_api_version(api_version): + # type: (str) -> None + """Raise ValueError if api_version is invalid """ + if not api_version: + return + + try: + api_version = DocumentTranslationVersion(api_version) + except ValueError: + raise ValueError( + "Unsupported API version '{}'. 
Please select from:\n{}".format( + api_version, ", ".join(v.value for v in DocumentTranslationVersion)) + ) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py new file mode 100644 index 000000000000..e48ec7f1148b --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py @@ -0,0 +1,165 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from typing import Union, Any, TYPE_CHECKING, List +from azure.core.tracing.decorator import distributed_trace +from ._generated import BatchDocumentTranslationClient as _BatchDocumentTranslationClient +from ._helpers import get_authentication_policy +from ._user_agent import USER_AGENT +if TYPE_CHECKING: + from azure.core.paging import ItemPaged + from azure.core.credentials import AzureKeyCredential, TokenCredential + from ._models import JobStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat + + +class DocumentTranslationClient(object): + """DocumentTranslationClient + + """ + + def __init__(self, endpoint, credential, **kwargs): + # type: (str, Union[AzureKeyCredential, TokenCredential], **Any) -> None + """ + + :param str endpoint: + :param credential: + :type credential: Union[AzureKeyCredential, TokenCredential] + :keyword str api_version: + """ + self._endpoint = endpoint + self._credential = credential + self._api_version = kwargs.pop('api_version', None) + + authentication_policy = get_authentication_policy(credential) + self._client = _BatchDocumentTranslationClient( + endpoint=endpoint, + credential=credential, # type: ignore + api_version=self._api_version, + sdk_moniker=USER_AGENT, + authentication_policy=authentication_policy, + polling_interval=5, # TODO what is appropriate polling interval + **kwargs + ) + + @distributed_trace + def create_translation_job(self, batch, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> JobStatusDetail + """ + + :param batch: + :type batch: List[~azure.ai.documenttranslation.BatchDocumentInput] + :return: JobStatusDetail + :rtype: JobStatusDetail + """ + + return self._client.document_translation.begin_submit_batch_request( + inputs=batch, + polling=True, + **kwargs + ) + + @distributed_trace + def get_job_status(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :rtype: ~azure.ai.documenttranslation.JobStatusDetail + """ + + return self._client.document_translation.get_operation_status(job_id, **kwargs) + + @distributed_trace + def cancel_job(self, job_id, **kwargs): + # type: (str, **Any) -> None + """ + + :param job_id: guid id for job + :type job_id: str + :rtype: None + """ + + self._client.document_translation.cancel_operation(job_id, **kwargs) + + @distributed_trace + def wait_until_done(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :return: JobStatusDetail + :rtype: JobStatusDetail + """ + pass + + @distributed_trace + def list_submitted_jobs(self, **kwargs): + # type: (**Any) -> ItemPaged[JobStatusDetail] + """ + + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.polling.ItemPaged[JobStatusDetail] + """ + return self._client.document_translation.get_operations(**kwargs) + 
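A hedged sketch of the job-submission flow using the client methods defined above. `BatchDocumentInput` and `StorageTarget` are defined in `_models.py`, which is not part of this excerpt, so the constructor keywords and result attributes used below are assumptions.

```python
from azure.core.credentials import AzureKeyCredential
from azure.ai.documenttranslation import (
    BatchDocumentInput,
    DocumentTranslationClient,
    StorageTarget,
)

client = DocumentTranslationClient("<endpoint>", AzureKeyCredential("<api-key>"))

# Describe one source container and its translation target(s). The keyword
# names below are assumptions -- the models are not shown in this diff.
batch = [
    BatchDocumentInput(
        source_url="<sas-url-to-source-container>",
        targets=[
            StorageTarget(
                target_url="<sas-url-to-target-container>",
                language="fr",
            )
        ],
    )
]

job = client.create_translation_job(batch)      # documented to return JobStatusDetail
job = client.get_job_status(job.id)             # poll by job id ("id" attribute assumed)

for submitted in client.list_submitted_jobs():  # pages of JobStatusDetail
    print(submitted.id, submitted.status)       # attribute names assumed
```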
+ @distributed_trace + def list_documents_statuses(self, job_id, **kwargs): + # type: (str, **Any) -> ItemPaged[DocumentStatusDetail] + """ + + :param job_id: guid id for job + :type job_id: str + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.paging.ItemPaged[DocumentStatusDetail] + """ + + return self._client.document_translation.get_operation_documents_status(job_id, **kwargs) + + @distributed_trace + def get_document_status(self, job_id, document_id, **kwargs): + # type: (str, str, **Any) -> DocumentStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :param document_id: guid id for document + :type document_id: str + :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail + """ + return self._client.document_translation.get_document_status(job_id, document_id, **kwargs) + + @distributed_trace + def get_supported_storage_sources(self, **kwargs): + # type: (**Any) -> List[str] + """ + + :rtype: List[str] + """ + return self._client.document_translation.get_document_storage_source(**kwargs) + + @distributed_trace + def get_supported_glossary_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: List[FileFormat] + """ + + return self._client.document_translation.get_glossary_formats(**kwargs) + + @distributed_trace + def get_supported_document_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: List[FileFormat] + """ + + return self._client.document_translation.get_document_formats(**kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py new file mode 100644 index 000000000000..5a2929b79fb0 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._batch_document_translation_client import BatchDocumentTranslationClient +__all__ = ['BatchDocumentTranslationClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py new file mode 100644 index 000000000000..2d807beae995 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
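Continuing the sketch, the document-level helpers above can be used to inspect per-document progress and to discover what the service supports; attribute names on the returned models are assumptions.

```python
# `client` is the DocumentTranslationClient constructed earlier; `job_id` is the
# id of a previously submitted job. Model attribute names are assumptions.
job_id = "<job-guid>"

for doc in client.list_documents_statuses(job_id):
    print(doc.id, doc.status)

document = client.get_document_status(job_id, "<document-guid>")
print(document.status)

print(client.get_supported_storage_sources())        # list of storage source names
for fmt in client.get_supported_document_formats():  # list of FileFormat
    print(fmt)
```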
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core import PipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +from ._configuration import BatchDocumentTranslationClientConfiguration +from .operations import DocumentTranslationOperations +from . import models + + +class BatchDocumentTranslationClient(object): + """BatchDocumentTranslationClient. + + :ivar document_translation: DocumentTranslationOperations operations + :vartype document_translation: azure.ai.documenttranslation.operations.DocumentTranslationOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + endpoint, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + base_url = '{endpoint}/translator/text/batch/v1.0-preview.1' + self._config = BatchDocumentTranslationClientConfiguration(credential, endpoint, **kwargs) + self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.document_translation = DocumentTranslationOperations( + self._client, self._config, self._serialize, self._deserialize) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> BatchDocumentTranslationClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py new file mode 100644 index 000000000000..159aa8c93a94 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +VERSION = "unknown" + +class BatchDocumentTranslationClientConfiguration(Configuration): + """Configuration for BatchDocumentTranslationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + endpoint, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + super(BatchDocumentTranslationClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.endpoint = endpoint + self.credential_scopes = kwargs.pop('credential_scopes', ['https://cognitiveservices.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'ai-documenttranslation/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py new file mode 100644 index 000000000000..05bf6f3f777c --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
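Because `_configure` resolves every pipeline policy from `kwargs`, callers can override individual policies (or forward extra options) when constructing the generated client; a hedged sketch, with `azure-identity` assumed as an extra dependency providing the token credential.

```python
from azure.core.pipeline.policies import RetryPolicy
from azure.identity import DefaultAzureCredential  # assumed extra dependency
from azure.ai.documenttranslation._generated import BatchDocumentTranslationClient

client = BatchDocumentTranslationClient(
    DefaultAzureCredential(),
    "<endpoint>",
    retry_policy=RetryPolicy(retry_total=3),  # replaces the default RetryPolicy(**kwargs)
    logging_enable=True,                      # consumed by the default logging policies
)
```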
+# -------------------------------------------------------------------------- + +from ._batch_document_translation_client import BatchDocumentTranslationClient +__all__ = ['BatchDocumentTranslationClient'] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py new file mode 100644 index 000000000000..6fcca5e7db28 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core import AsyncPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import BatchDocumentTranslationClientConfiguration +from .operations import DocumentTranslationOperations +from .. import models + + +class BatchDocumentTranslationClient(object): + """BatchDocumentTranslationClient. + + :ivar document_translation: DocumentTranslationOperations operations + :vartype document_translation: azure.ai.documenttranslation.aio.operations.DocumentTranslationOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + endpoint: str, + **kwargs: Any + ) -> None: + base_url = '{endpoint}/translator/text/batch/v1.0-preview.1' + self._config = BatchDocumentTranslationClientConfiguration(credential, endpoint, **kwargs) + self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.document_translation = DocumentTranslationOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "BatchDocumentTranslationClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py new file mode 100644 index 000000000000..f90f307734cb --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +VERSION = "unknown" + +class BatchDocumentTranslationClientConfiguration(Configuration): + """Configuration for BatchDocumentTranslationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). 
+ :type endpoint: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + endpoint: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + super(BatchDocumentTranslationClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.endpoint = endpoint + self.credential_scopes = kwargs.pop('credential_scopes', ['https://cognitiveservices.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'ai-documenttranslation/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py new file mode 100644 index 000000000000..e524e2215fb7 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._document_translation_operations import DocumentTranslationOperations + +__all__ = [ + 'DocumentTranslationOperations', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py new file mode 100644 index 000000000000..b8fd6dc69d8c --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py @@ -0,0 +1,751 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DocumentTranslationOperations: + """DocumentTranslationOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.ai.documenttranslation.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _submit_batch_request_initial( + self, + inputs: List["_models.BatchRequest"], + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + + _body = _models.BatchSubmissionRequest(inputs=inputs) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._submit_batch_request_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: 
Dict[str, Any] + if _body is not None: + body_content = self._serialize.body(_body, 'BatchSubmissionRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Operation-Location']=self._deserialize('str', response.headers.get('Operation-Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _submit_batch_request_initial.metadata = {'url': '/batches'} # type: ignore + + async def begin_submit_batch_request( + self, + inputs: List["_models.BatchRequest"], + **kwargs + ) -> AsyncLROPoller[None]: + """Submit a document translation request to the Document Translation service. + + Use this API to submit a bulk (batch) translation request to the Document Translation service. + Each request can contain multiple documents and must contain a source and destination container + for each document. + + The prefix and suffix filter (if supplied) are used to filter folders. The prefix is applied to + the subpath after the container name. + + Glossaries / Translation memory can be included in the request and are applied by the service + when the document is translated. + + If the glossary is invalid or unreachable during translation, an error is indicated in the + document status. + If a file with the same name already exists at the destination, it will be overwritten. The + targetUrl for each target language must be unique. + + :param inputs: The input list of documents or folders containing documents. + :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._submit_batch_request_initial( + inputs=inputs, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_submit_batch_request.metadata = {'url': '/batches'} # type: ignore + + def get_operations( + self, + top: Optional[int] = 50, + skip: Optional[int] = 0, + **kwargs + ) -> AsyncIterable["_models.BatchStatusResponse"]: + """Returns a list of batch requests submitted and the status for each request. + + Returns a list of batch requests submitted and the status for each request. + This list only contains batch requests submitted by the user (based on the subscription). The + status for each request is sorted by id. + + If the number of requests exceeds our paging limit, server-side paging is used. Paginated + responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. + Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. 
+ :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchStatusResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.documenttranslation.models.BatchStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusResponse"] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operations.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('BatchStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + get_operations.metadata = {'url': '/batches'} # type: ignore + + async def get_document_status( + self, + id: str, + document_id: str, + **kwargs + ) -> "_models.DocumentStatusDetail": + """Returns the status for a specific document. + + Returns the translation status for a specific document based on the request Id and document Id. 
+ + :param id: Format - uuid. The batch id. + :type id: str + :param document_id: Format - uuid. The document id. + :type document_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DocumentStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.DocumentStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + 'documentId': self._serialize.url("document_id", document_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('DocumentStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_document_status.metadata = {'url': '/batches/{id}/documents/{documentId}'} # type: ignore + + async def get_operation_status( + self, + id: str, + **kwargs + ) -> "_models.BatchStatusDetail": + """Returns the status for a document translation request. + + Returns the status for a document translation request. + The status includes the overall request status, as well as the status for documents that are + being translated as part of that request. + + :param id: Format - uuid. The operation id. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_operation_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_operation_status.metadata = {'url': '/batches/{id}'} # type: ignore + + async def cancel_operation( + self, + id: str, + **kwargs + ) -> "_models.BatchStatusDetail": + """Cancel a currently processing or queued operation. + + Cancel a currently processing or queued operation. + Cancel a currently processing or queued operation. + An operation will not be cancelled if it is already completed or failed or cancelling. A bad + request will be returned. + All documents that have completed translation will not be cancelled and will be charged. + All pending documents will be cancelled if possible. + + :param id: Format - uuid. The operation-id. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.cancel_operation.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + cancel_operation.metadata = {'url': '/batches/{id}'} # type: ignore + + def get_operation_documents_status( + self, + id: str, + top: Optional[int] = 50, + skip: Optional[int] = 0, + **kwargs + ) -> AsyncIterable["_models.DocumentStatusResponse"]: + """Returns the status for all documents in a batch document translation request. + + Returns the status for all documents in a batch document translation request. + + The documents included in the response are sorted by document Id in descending order. If the + number of documents in the response exceeds our paging limit, server-side paging is used. + Paginated responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. 
+ Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param id: Format - uuid. The operation id. + :type id: str + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DocumentStatusResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.documenttranslation.models.DocumentStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusResponse"] + error_map = { + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operation_documents_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DocumentStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def 
get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + get_operation_documents_status.metadata = {'url': '/batches/{id}/documents'} # type: ignore + + async def get_document_formats( + self, + **kwargs + ) -> "_models.FileFormatListResult": + """Returns a list of supported document formats. + + The list of supported document formats supported by the Document Translation service. + The list includes the common file extension, as well as the content-type if using the upload + API. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_formats.metadata = {'url': '/documents/formats'} # type: ignore + + async def get_glossary_formats( + self, + **kwargs + ) -> "_models.FileFormatListResult": + """Returns the list of supported glossary formats. + + The list of supported glossary formats supported by the Document Translation service. + The list includes the common file extension used. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_glossary_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_glossary_formats.metadata = {'url': '/glossaries/formats'} # type: ignore + + async def get_document_storage_source( + self, + **kwargs + ) -> "_models.StorageSourceListResult": + """Returns a list of supported storage sources. + + Returns a list of storage sources/options supported by the Document Translation service. 
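# Illustrative sketch only (not part of the generated patch): with the same assumed async
# operations instance, the format listing operations above return a simple list result
# whose items can be inspected directly.
async def print_supported_formats(ops):
    doc_formats = await ops.get_document_formats()        # FileFormatListResult
    glossary_formats = await ops.get_glossary_formats()   # FileFormatListResult
    for fmt in doc_formats.value + glossary_formats.value:
        print(fmt.format, fmt.file_extensions, fmt.content_types)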
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageSourceListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.StorageSourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSourceListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_storage_source.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('StorageSourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_storage_source.metadata = {'url': '/storagesources'} # type: ignore diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py new file mode 100644 index 000000000000..b3ab9ad64a30 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import BatchRequest + from ._models_py3 import BatchStatusDetail + from ._models_py3 import BatchStatusResponse + from ._models_py3 import BatchSubmissionRequest + from ._models_py3 import DocumentFilter + from ._models_py3 import DocumentStatusDetail + from ._models_py3 import DocumentStatusResponse + from ._models_py3 import ErrorResponseV2 + from ._models_py3 import ErrorV2 + from ._models_py3 import FileFormat + from ._models_py3 import FileFormatListResult + from ._models_py3 import Glossary + from ._models_py3 import InnerErrorV2 + from ._models_py3 import SourceInput + from ._models_py3 import StatusSummary + from ._models_py3 import StorageSourceListResult + from ._models_py3 import TargetInput +except (SyntaxError, ImportError): + from ._models import BatchRequest # type: ignore + from ._models import BatchStatusDetail # type: ignore + from ._models import BatchStatusResponse # type: ignore + from ._models import BatchSubmissionRequest # type: ignore + from ._models import DocumentFilter # type: ignore + from ._models import DocumentStatusDetail # type: ignore + from ._models import DocumentStatusResponse # type: ignore + from ._models import ErrorResponseV2 # type: ignore + from ._models import ErrorV2 # type: ignore + from ._models import FileFormat # type: ignore + from ._models import FileFormatListResult # type: ignore + from ._models import Glossary # type: ignore + from ._models import InnerErrorV2 # type: ignore + from ._models import SourceInput # type: ignore + from ._models import StatusSummary # type: ignore + from ._models import StorageSourceListResult # type: ignore + from ._models import TargetInput # type: ignore + +from ._batch_document_translation_client_enums import ( + ErrorCodeV2, + Status, + StorageInputType, + StorageSource, +) + +__all__ = [ + 'BatchRequest', + 'BatchStatusDetail', + 'BatchStatusResponse', + 'BatchSubmissionRequest', + 'DocumentFilter', + 'DocumentStatusDetail', + 'DocumentStatusResponse', + 'ErrorResponseV2', + 'ErrorV2', + 'FileFormat', + 'FileFormatListResult', + 'Glossary', + 'InnerErrorV2', + 'SourceInput', + 'StatusSummary', + 'StorageSourceListResult', + 'TargetInput', + 'ErrorCodeV2', + 'Status', + 'StorageInputType', + 'StorageSource', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py new file mode 100644 index 000000000000..3226ec60d00d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class ErrorCodeV2(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enums containing high level error codes. + """ + + INVALID_REQUEST = "InvalidRequest" + INVALID_ARGUMENT = "InvalidArgument" + INTERNAL_SERVER_ERROR = "InternalServerError" + SERVICE_UNAVAILABLE = "ServiceUnavailable" + RESOURCE_NOT_FOUND = "ResourceNotFound" + UNAUTHORIZED = "Unauthorized" + REQUEST_RATE_TOO_HIGH = "RequestRateTooHigh" + +class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """List of possible statuses for job or document + """ + + NOT_STARTED = "NotStarted" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" + CANCELLING = "Cancelling" + VALIDATION_FAILED = "ValidationFailed" + +class StorageInputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Storage type of the input documents source string + """ + + FOLDER = "Folder" + FILE = "File" + +class StorageSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Storage Source + """ + + AZURE_BLOB = "AzureBlob" diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py new file mode 100644 index 000000000000..9a45dde69780 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py @@ -0,0 +1,650 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class BatchRequest(msrest.serialization.Model): + """Definition for the input batch translation request. + + All required parameters must be populated in order to send to Azure. + + :param source: Required. Source of the input documents. + :type source: ~azure.ai.documenttranslation.models.SourceInput + :param targets: Required. Location of the destination for the output. + :type targets: list[~azure.ai.documenttranslation.models.TargetInput] + :param storage_type: Storage type of the input documents source string. Possible values + include: "Folder", "File". 
+ :type storage_type: str or ~azure.ai.documenttranslation.models.StorageInputType + """ + + _validation = { + 'source': {'required': True}, + 'targets': {'required': True}, + } + + _attribute_map = { + 'source': {'key': 'source', 'type': 'SourceInput'}, + 'targets': {'key': 'targets', 'type': '[TargetInput]'}, + 'storage_type': {'key': 'storageType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchRequest, self).__init__(**kwargs) + self.source = kwargs['source'] + self.targets = kwargs['targets'] + self.storage_type = kwargs.get('storage_type', None) + + +class BatchStatusDetail(msrest.serialization.Model): + """Job status response. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. Id of the operation. + :type id: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param summary: Required. + :type summary: ~azure.ai.documenttranslation.models.StatusSummary + """ + + _validation = { + 'id': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'summary': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'summary': {'key': 'summary', 'type': 'StatusSummary'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchStatusDetail, self).__init__(**kwargs) + self.id = kwargs['id'] + self.created_date_time_utc = kwargs['created_date_time_utc'] + self.last_action_date_time_utc = kwargs['last_action_date_time_utc'] + self.status = kwargs['status'] + self.error = kwargs.get('error', None) + self.summary = kwargs['summary'] + + +class BatchStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The summary status of individual operation. + :type value: list[~azure.ai.documenttranslation.models.BatchStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BatchStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchStatusResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class BatchSubmissionRequest(msrest.serialization.Model): + """Job submission batch request. 
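# Illustrative sketch only (not part of the generated patch): how the request models in
# this module compose into a submission body. The URLs are placeholders, SourceInput and
# TargetInput are defined further down in this same file, and these Python 2 compatible
# models take keyword arguments rather than typed positional parameters.
source = SourceInput(source_url="<source container SAS URL>", language="en")
target = TargetInput(target_url="<target container SAS URL>", language="fr")
body = BatchSubmissionRequest(
    inputs=[BatchRequest(source=source, targets=[target], storage_type="Folder")]
)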
+ + All required parameters must be populated in order to send to Azure. + + :param inputs: Required. The input list of documents or folders containing documents. + :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + """ + + _validation = { + 'inputs': {'required': True}, + } + + _attribute_map = { + 'inputs': {'key': 'inputs', 'type': '[BatchRequest]'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchSubmissionRequest, self).__init__(**kwargs) + self.inputs = kwargs['inputs'] + + +class DocumentFilter(msrest.serialization.Model): + """DocumentFilter. + + :param prefix: A case-sensitive prefix string to filter documents in the source path for + translation. + For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for + translation. + :type prefix: str + :param suffix: A case-sensitive suffix string to filter documents in the source path for + translation. + This is most often use for file extensions. + :type suffix: str + """ + + _attribute_map = { + 'prefix': {'key': 'prefix', 'type': 'str'}, + 'suffix': {'key': 'suffix', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentFilter, self).__init__(**kwargs) + self.prefix = kwargs.get('prefix', None) + self.suffix = kwargs.get('suffix', None) + + +class DocumentStatusDetail(msrest.serialization.Model): + """DocumentStatusDetail. + + All required parameters must be populated in order to send to Azure. + + :param path: Required. Location of the document or folder. + :type path: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param to: Required. To language. + :type to: str + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param progress: Required. Progress of the translation if available. + :type progress: float + :param id: Required. Document Id. + :type id: str + :param character_charged: Character charged by the API. 
+ :type character_charged: long + """ + + _validation = { + 'path': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'to': {'required': True}, + 'progress': {'required': True, 'maximum': 1, 'minimum': 0}, + 'id': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'to': {'key': 'to', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'progress': {'key': 'progress', 'type': 'float'}, + 'id': {'key': 'id', 'type': 'str'}, + 'character_charged': {'key': 'characterCharged', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentStatusDetail, self).__init__(**kwargs) + self.path = kwargs['path'] + self.created_date_time_utc = kwargs['created_date_time_utc'] + self.last_action_date_time_utc = kwargs['last_action_date_time_utc'] + self.status = kwargs['status'] + self.to = kwargs['to'] + self.error = kwargs.get('error', None) + self.progress = kwargs['progress'] + self.id = kwargs['id'] + self.character_charged = kwargs.get('character_charged', None) + + +class DocumentStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The detail status of individual documents. + :type value: list[~azure.ai.documenttranslation.models.DocumentStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DocumentStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentStatusResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class ErrorResponseV2(msrest.serialization.Model): + """Contains unified error information used for HTTP responses across any Cognitive Service. Instances +can be created either through Microsoft.CloudAI.Containers.HttpStatusExceptionV2 or by returning it directly from +a controller. + + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponseV2, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorV2(msrest.serialization.Model): + """This contains an outer error with error code, message, details, target and an inner error with more descriptive details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Enums containing high level error codes. Possible values include: + "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable", + "ResourceNotFound", "Unauthorized", "RequestRateTooHigh". + :type code: str or ~azure.ai.documenttranslation.models.ErrorCodeV2 + :param message: Required. Gets high level error message. 
+ :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorV2, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs['message'] + self.target = None + self.inner_error = kwargs.get('inner_error', None) + + +class FileFormat(msrest.serialization.Model): + """FileFormat. + + :param format: Name of the format. + :type format: str + :param file_extensions: Supported file extension for this format. + :type file_extensions: list[str] + :param content_types: Supported Content-Types for this format. + :type content_types: list[str] + :param versions: Supported Version. + :type versions: list[str] + """ + + _attribute_map = { + 'format': {'key': 'format', 'type': 'str'}, + 'file_extensions': {'key': 'fileExtensions', 'type': '[str]'}, + 'content_types': {'key': 'contentTypes', 'type': '[str]'}, + 'versions': {'key': 'versions', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileFormat, self).__init__(**kwargs) + self.format = kwargs.get('format', None) + self.file_extensions = kwargs.get('file_extensions', None) + self.content_types = kwargs.get('content_types', None) + self.versions = kwargs.get('versions', None) + + +class FileFormatListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[~azure.ai.documenttranslation.models.FileFormat] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileFormat]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileFormatListResult, self).__init__(**kwargs) + self.value = kwargs['value'] + + +class Glossary(msrest.serialization.Model): + """Glossary / translation memory for the request. + + All required parameters must be populated in order to send to Azure. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. + + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :param format: Format. + :type format: str + :param version: Version. + :type version: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". 
+ :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'glossary_url': {'required': True}, + } + + _attribute_map = { + 'glossary_url': {'key': 'glossaryUrl', 'type': 'str'}, + 'format': {'key': 'format', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Glossary, self).__init__(**kwargs) + self.glossary_url = kwargs['glossary_url'] + self.format = kwargs.get('format', None) + self.version = kwargs.get('version', None) + self.storage_source = kwargs.get('storage_source', None) + + +class InnerErrorV2(msrest.serialization.Model): + """New Inner Error format which conforms to Cognitive Services API Guidelines which is available at https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. +This contains required properties ErrorCode, message and optional properties target, details(key value pair), inner error(this can be nested). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Gets code error string. + :type code: str + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(InnerErrorV2, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.target = None + self.inner_error = kwargs.get('inner_error', None) + + +class SourceInput(msrest.serialization.Model): + """Source of the input documents. + + All required parameters must be populated in order to send to Azure. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param filter: + :type filter: ~azure.ai.documenttranslation.models.DocumentFilter + :param language: Language code + If none is specified, we will perform auto detect on the document. + :type language: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". 
+ :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'source_url': {'required': True}, + } + + _attribute_map = { + 'source_url': {'key': 'sourceUrl', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'DocumentFilter'}, + 'language': {'key': 'language', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SourceInput, self).__init__(**kwargs) + self.source_url = kwargs['source_url'] + self.filter = kwargs.get('filter', None) + self.language = kwargs.get('language', None) + self.storage_source = kwargs.get('storage_source', None) + + +class StatusSummary(msrest.serialization.Model): + """StatusSummary. + + All required parameters must be populated in order to send to Azure. + + :param total: Required. Total count. + :type total: int + :param failed: Required. Failed count. + :type failed: int + :param success: Required. Number of Success. + :type success: int + :param in_progress: Required. Number of in progress. + :type in_progress: int + :param not_yet_started: Required. Count of not yet started. + :type not_yet_started: int + :param cancelled: Required. Number of cancelled. + :type cancelled: int + :param total_character_charged: Required. Total characters charged by the API. + :type total_character_charged: long + """ + + _validation = { + 'total': {'required': True}, + 'failed': {'required': True}, + 'success': {'required': True}, + 'in_progress': {'required': True}, + 'not_yet_started': {'required': True}, + 'cancelled': {'required': True}, + 'total_character_charged': {'required': True}, + } + + _attribute_map = { + 'total': {'key': 'total', 'type': 'int'}, + 'failed': {'key': 'failed', 'type': 'int'}, + 'success': {'key': 'success', 'type': 'int'}, + 'in_progress': {'key': 'inProgress', 'type': 'int'}, + 'not_yet_started': {'key': 'notYetStarted', 'type': 'int'}, + 'cancelled': {'key': 'cancelled', 'type': 'int'}, + 'total_character_charged': {'key': 'totalCharacterCharged', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(StatusSummary, self).__init__(**kwargs) + self.total = kwargs['total'] + self.failed = kwargs['failed'] + self.success = kwargs['success'] + self.in_progress = kwargs['in_progress'] + self.not_yet_started = kwargs['not_yet_started'] + self.cancelled = kwargs['cancelled'] + self.total_character_charged = kwargs['total_character_charged'] + + +class StorageSourceListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[str or ~azure.ai.documenttranslation.models.StorageSource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageSourceListResult, self).__init__(**kwargs) + self.value = kwargs['value'] + + +class TargetInput(msrest.serialization.Model): + """Destination for the finished translated documents. + + All required parameters must be populated in order to send to Azure. + + :param target_url: Required. Location of the folder / container with your documents. + :type target_url: str + :param category: Category / custom system for translation request. + :type category: str + :param language: Required. Target Language. + :type language: str + :param glossaries: List of Glossary. 
+ :type glossaries: list[~azure.ai.documenttranslation.models.Glossary] + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'target_url': {'required': True}, + 'language': {'required': True}, + } + + _attribute_map = { + 'target_url': {'key': 'targetUrl', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'glossaries': {'key': 'glossaries', 'type': '[Glossary]'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TargetInput, self).__init__(**kwargs) + self.target_url = kwargs['target_url'] + self.category = kwargs.get('category', None) + self.language = kwargs['language'] + self.glossaries = kwargs.get('glossaries', None) + self.storage_source = kwargs.get('storage_source', None) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py new file mode 100644 index 000000000000..5502c4b68467 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py @@ -0,0 +1,730 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._batch_document_translation_client_enums import * + + +class BatchRequest(msrest.serialization.Model): + """Definition for the input batch translation request. + + All required parameters must be populated in order to send to Azure. + + :param source: Required. Source of the input documents. + :type source: ~azure.ai.documenttranslation.models.SourceInput + :param targets: Required. Location of the destination for the output. + :type targets: list[~azure.ai.documenttranslation.models.TargetInput] + :param storage_type: Storage type of the input documents source string. Possible values + include: "Folder", "File". + :type storage_type: str or ~azure.ai.documenttranslation.models.StorageInputType + """ + + _validation = { + 'source': {'required': True}, + 'targets': {'required': True}, + } + + _attribute_map = { + 'source': {'key': 'source', 'type': 'SourceInput'}, + 'targets': {'key': 'targets', 'type': '[TargetInput]'}, + 'storage_type': {'key': 'storageType', 'type': 'str'}, + } + + def __init__( + self, + *, + source: "SourceInput", + targets: List["TargetInput"], + storage_type: Optional[Union[str, "StorageInputType"]] = None, + **kwargs + ): + super(BatchRequest, self).__init__(**kwargs) + self.source = source + self.targets = targets + self.storage_type = storage_type + + +class BatchStatusDetail(msrest.serialization.Model): + """Job status response. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. Id of the operation. 
+ :type id: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param summary: Required. + :type summary: ~azure.ai.documenttranslation.models.StatusSummary + """ + + _validation = { + 'id': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'summary': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'summary': {'key': 'summary', 'type': 'StatusSummary'}, + } + + def __init__( + self, + *, + id: str, + created_date_time_utc: datetime.datetime, + last_action_date_time_utc: datetime.datetime, + status: Union[str, "Status"], + summary: "StatusSummary", + error: Optional["ErrorV2"] = None, + **kwargs + ): + super(BatchStatusDetail, self).__init__(**kwargs) + self.id = id + self.created_date_time_utc = created_date_time_utc + self.last_action_date_time_utc = last_action_date_time_utc + self.status = status + self.error = error + self.summary = summary + + +class BatchStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The summary status of individual operation. + :type value: list[~azure.ai.documenttranslation.models.BatchStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BatchStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["BatchStatusDetail"], + next_link: Optional[str] = None, + **kwargs + ): + super(BatchStatusResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BatchSubmissionRequest(msrest.serialization.Model): + """Job submission batch request. + + All required parameters must be populated in order to send to Azure. + + :param inputs: Required. The input list of documents or folders containing documents. + :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + """ + + _validation = { + 'inputs': {'required': True}, + } + + _attribute_map = { + 'inputs': {'key': 'inputs', 'type': '[BatchRequest]'}, + } + + def __init__( + self, + *, + inputs: List["BatchRequest"], + **kwargs + ): + super(BatchSubmissionRequest, self).__init__(**kwargs) + self.inputs = inputs + + +class DocumentFilter(msrest.serialization.Model): + """DocumentFilter. 
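# Illustrative sketch only (not part of the generated patch): the keyword-argument models
# in this py3 module let a caller narrow the source container, for example to .docx files
# under a "reports/" folder, by attaching a DocumentFilter to the SourceInput defined below.
doc_filter = DocumentFilter(prefix="reports/", suffix=".docx")
source = SourceInput(
    source_url="<source container SAS URL>",
    filter=doc_filter,
    language="de",
)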
+ + :param prefix: A case-sensitive prefix string to filter documents in the source path for + translation. + For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for + translation. + :type prefix: str + :param suffix: A case-sensitive suffix string to filter documents in the source path for + translation. + This is most often use for file extensions. + :type suffix: str + """ + + _attribute_map = { + 'prefix': {'key': 'prefix', 'type': 'str'}, + 'suffix': {'key': 'suffix', 'type': 'str'}, + } + + def __init__( + self, + *, + prefix: Optional[str] = None, + suffix: Optional[str] = None, + **kwargs + ): + super(DocumentFilter, self).__init__(**kwargs) + self.prefix = prefix + self.suffix = suffix + + +class DocumentStatusDetail(msrest.serialization.Model): + """DocumentStatusDetail. + + All required parameters must be populated in order to send to Azure. + + :param path: Required. Location of the document or folder. + :type path: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param to: Required. To language. + :type to: str + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param progress: Required. Progress of the translation if available. + :type progress: float + :param id: Required. Document Id. + :type id: str + :param character_charged: Character charged by the API. 
+ :type character_charged: long + """ + + _validation = { + 'path': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'to': {'required': True}, + 'progress': {'required': True, 'maximum': 1, 'minimum': 0}, + 'id': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'to': {'key': 'to', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'progress': {'key': 'progress', 'type': 'float'}, + 'id': {'key': 'id', 'type': 'str'}, + 'character_charged': {'key': 'characterCharged', 'type': 'long'}, + } + + def __init__( + self, + *, + path: str, + created_date_time_utc: datetime.datetime, + last_action_date_time_utc: datetime.datetime, + status: Union[str, "Status"], + to: str, + progress: float, + id: str, + error: Optional["ErrorV2"] = None, + character_charged: Optional[int] = None, + **kwargs + ): + super(DocumentStatusDetail, self).__init__(**kwargs) + self.path = path + self.created_date_time_utc = created_date_time_utc + self.last_action_date_time_utc = last_action_date_time_utc + self.status = status + self.to = to + self.error = error + self.progress = progress + self.id = id + self.character_charged = character_charged + + +class DocumentStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The detail status of individual documents. + :type value: list[~azure.ai.documenttranslation.models.DocumentStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DocumentStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["DocumentStatusDetail"], + next_link: Optional[str] = None, + **kwargs + ): + super(DocumentStatusResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ErrorResponseV2(msrest.serialization.Model): + """Contains unified error information used for HTTP responses across any Cognitive Service. Instances +can be created either through Microsoft.CloudAI.Containers.HttpStatusExceptionV2 or by returning it directly from +a controller. + + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorV2'}, + } + + def __init__( + self, + *, + error: Optional["ErrorV2"] = None, + **kwargs + ): + super(ErrorResponseV2, self).__init__(**kwargs) + self.error = error + + +class ErrorV2(msrest.serialization.Model): + """This contains an outer error with error code, message, details, target and an inner error with more descriptive details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Enums containing high level error codes. 
Possible values include: + "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable", + "ResourceNotFound", "Unauthorized", "RequestRateTooHigh". + :type code: str or ~azure.ai.documenttranslation.models.ErrorCodeV2 + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + *, + message: str, + code: Optional[Union[str, "ErrorCodeV2"]] = None, + inner_error: Optional["InnerErrorV2"] = None, + **kwargs + ): + super(ErrorV2, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = None + self.inner_error = inner_error + + +class FileFormat(msrest.serialization.Model): + """FileFormat. + + :param format: Name of the format. + :type format: str + :param file_extensions: Supported file extension for this format. + :type file_extensions: list[str] + :param content_types: Supported Content-Types for this format. + :type content_types: list[str] + :param versions: Supported Version. + :type versions: list[str] + """ + + _attribute_map = { + 'format': {'key': 'format', 'type': 'str'}, + 'file_extensions': {'key': 'fileExtensions', 'type': '[str]'}, + 'content_types': {'key': 'contentTypes', 'type': '[str]'}, + 'versions': {'key': 'versions', 'type': '[str]'}, + } + + def __init__( + self, + *, + format: Optional[str] = None, + file_extensions: Optional[List[str]] = None, + content_types: Optional[List[str]] = None, + versions: Optional[List[str]] = None, + **kwargs + ): + super(FileFormat, self).__init__(**kwargs) + self.format = format + self.file_extensions = file_extensions + self.content_types = content_types + self.versions = versions + + +class FileFormatListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[~azure.ai.documenttranslation.models.FileFormat] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileFormat]'}, + } + + def __init__( + self, + *, + value: List["FileFormat"], + **kwargs + ): + super(FileFormatListResult, self).__init__(**kwargs) + self.value = value + + +class Glossary(msrest.serialization.Model): + """Glossary / translation memory for the request. + + All required parameters must be populated in order to send to Azure. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. 
+ + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :param format: Format. + :type format: str + :param version: Version. + :type version: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'glossary_url': {'required': True}, + } + + _attribute_map = { + 'glossary_url': {'key': 'glossaryUrl', 'type': 'str'}, + 'format': {'key': 'format', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + glossary_url: str, + format: Optional[str] = None, + version: Optional[str] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(Glossary, self).__init__(**kwargs) + self.glossary_url = glossary_url + self.format = format + self.version = version + self.storage_source = storage_source + + +class InnerErrorV2(msrest.serialization.Model): + """New Inner Error format which conforms to Cognitive Services API Guidelines which is available at https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. +This contains required properties ErrorCode, message and optional properties target, details(key value pair), inner error(this can be nested). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Gets code error string. + :type code: str + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + *, + code: str, + message: str, + inner_error: Optional["InnerErrorV2"] = None, + **kwargs + ): + super(InnerErrorV2, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = None + self.inner_error = inner_error + + +class SourceInput(msrest.serialization.Model): + """Source of the input documents. + + All required parameters must be populated in order to send to Azure. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param filter: + :type filter: ~azure.ai.documenttranslation.models.DocumentFilter + :param language: Language code + If none is specified, we will perform auto detect on the document. + :type language: str + :param storage_source: Storage Source. 
Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'source_url': {'required': True}, + } + + _attribute_map = { + 'source_url': {'key': 'sourceUrl', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'DocumentFilter'}, + 'language': {'key': 'language', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + source_url: str, + filter: Optional["DocumentFilter"] = None, + language: Optional[str] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(SourceInput, self).__init__(**kwargs) + self.source_url = source_url + self.filter = filter + self.language = language + self.storage_source = storage_source + + +class StatusSummary(msrest.serialization.Model): + """StatusSummary. + + All required parameters must be populated in order to send to Azure. + + :param total: Required. Total count. + :type total: int + :param failed: Required. Failed count. + :type failed: int + :param success: Required. Number of Success. + :type success: int + :param in_progress: Required. Number of in progress. + :type in_progress: int + :param not_yet_started: Required. Count of not yet started. + :type not_yet_started: int + :param cancelled: Required. Number of cancelled. + :type cancelled: int + :param total_character_charged: Required. Total characters charged by the API. + :type total_character_charged: long + """ + + _validation = { + 'total': {'required': True}, + 'failed': {'required': True}, + 'success': {'required': True}, + 'in_progress': {'required': True}, + 'not_yet_started': {'required': True}, + 'cancelled': {'required': True}, + 'total_character_charged': {'required': True}, + } + + _attribute_map = { + 'total': {'key': 'total', 'type': 'int'}, + 'failed': {'key': 'failed', 'type': 'int'}, + 'success': {'key': 'success', 'type': 'int'}, + 'in_progress': {'key': 'inProgress', 'type': 'int'}, + 'not_yet_started': {'key': 'notYetStarted', 'type': 'int'}, + 'cancelled': {'key': 'cancelled', 'type': 'int'}, + 'total_character_charged': {'key': 'totalCharacterCharged', 'type': 'long'}, + } + + def __init__( + self, + *, + total: int, + failed: int, + success: int, + in_progress: int, + not_yet_started: int, + cancelled: int, + total_character_charged: int, + **kwargs + ): + super(StatusSummary, self).__init__(**kwargs) + self.total = total + self.failed = failed + self.success = success + self.in_progress = in_progress + self.not_yet_started = not_yet_started + self.cancelled = cancelled + self.total_character_charged = total_character_charged + + +class StorageSourceListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[str or ~azure.ai.documenttranslation.models.StorageSource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[str]'}, + } + + def __init__( + self, + *, + value: List[Union[str, "StorageSource"]], + **kwargs + ): + super(StorageSourceListResult, self).__init__(**kwargs) + self.value = value + + +class TargetInput(msrest.serialization.Model): + """Destination for the finished translated documents. + + All required parameters must be populated in order to send to Azure. + + :param target_url: Required. Location of the folder / container with your documents. 
+ :type target_url: str + :param category: Category / custom system for translation request. + :type category: str + :param language: Required. Target Language. + :type language: str + :param glossaries: List of Glossary. + :type glossaries: list[~azure.ai.documenttranslation.models.Glossary] + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'target_url': {'required': True}, + 'language': {'required': True}, + } + + _attribute_map = { + 'target_url': {'key': 'targetUrl', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'glossaries': {'key': 'glossaries', 'type': '[Glossary]'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + target_url: str, + language: str, + category: Optional[str] = None, + glossaries: Optional[List["Glossary"]] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(TargetInput, self).__init__(**kwargs) + self.target_url = target_url + self.category = category + self.language = language + self.glossaries = glossaries + self.storage_source = storage_source diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py new file mode 100644 index 000000000000..e524e2215fb7 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._document_translation_operations import DocumentTranslationOperations + +__all__ = [ + 'DocumentTranslationOperations', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py new file mode 100644 index 000000000000..3535e562022e --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py @@ -0,0 +1,765 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DocumentTranslationOperations(object): + """DocumentTranslationOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.ai.documenttranslation.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _submit_batch_request_initial( + self, + inputs, # type: List["_models.BatchRequest"] + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + + _body = _models.BatchSubmissionRequest(inputs=inputs) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._submit_batch_request_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if _body is not None: + body_content = self._serialize.body(_body, 'BatchSubmissionRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Operation-Location']=self._deserialize('str', response.headers.get('Operation-Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _submit_batch_request_initial.metadata = {'url': '/batches'} # type: ignore + + def begin_submit_batch_request( + self, + inputs, # type: List["_models.BatchRequest"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Submit a document translation request to the Document Translation service. + + Use this API to submit a bulk (batch) translation request to the Document Translation service. + Each request can contain multiple documents and must contain a source and destination container + for each document. + + The prefix and suffix filter (if supplied) are used to filter folders. The prefix is applied to + the subpath after the container name. + + Glossaries / Translation memory can be included in the request and are applied by the service + when the document is translated. + + If the glossary is invalid or unreachable during translation, an error is indicated in the + document status. + If a file with the same name already exists at the destination, it will be overwritten. The + targetUrl for each target language must be unique. + + :param inputs: The input list of documents or folders containing documents. 
+ :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._submit_batch_request_initial( + inputs=inputs, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_submit_batch_request.metadata = {'url': '/batches'} # type: ignore + + def get_operations( + self, + top=50, # type: Optional[int] + skip=0, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.BatchStatusResponse"] + """Returns a list of batch requests submitted and the status for each request. + + Returns a list of batch requests submitted and the status for each request. + This list only contains batch requests submitted by the user (based on the subscription). The + status for each request is sorted by id. + + If the number of requests exceeds our paging limit, server-side paging is used. Paginated + responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. + Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. 
+ This reduces the risk of the client making assumptions about the data returned. + + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchStatusResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.documenttranslation.models.BatchStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusResponse"] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operations.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('BatchStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + get_operations.metadata = {'url': 
'/batches'} # type: ignore + + def get_document_status( + self, + id, # type: str + document_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.DocumentStatusDetail" + """Returns the status for a specific document. + + Returns the translation status for a specific document based on the request Id and document Id. + + :param id: Format - uuid. The batch id. + :type id: str + :param document_id: Format - uuid. The document id. + :type document_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DocumentStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.DocumentStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + 'documentId': self._serialize.url("document_id", document_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('DocumentStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_document_status.metadata = {'url': '/batches/{id}/documents/{documentId}'} # type: ignore + + def get_operation_status( + self, + id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.BatchStatusDetail" + """Returns the status for a document translation request. + + Returns the status for a document translation request. + The status includes the overall request status, as well as the status for documents that are + being translated as part of that request. + + :param id: Format - uuid. The operation id. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_operation_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_operation_status.metadata = {'url': '/batches/{id}'} # type: ignore + + def cancel_operation( + self, + id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.BatchStatusDetail" + """Cancel a currently processing or queued operation. + + Cancel a currently processing or queued operation. + Cancel a currently processing or queued operation. + An operation will not be cancelled if it is already completed or failed or cancelling. A bad + request will be returned. + All documents that have completed translation will not be cancelled and will be charged. + All pending documents will be cancelled if possible. + + :param id: Format - uuid. The operation-id. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.cancel_operation.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + cancel_operation.metadata = {'url': '/batches/{id}'} # type: ignore + + def get_operation_documents_status( + self, + id, # type: str + top=50, # type: Optional[int] + skip=0, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.DocumentStatusResponse"] + """Returns the status for all documents in a batch document translation request. + + Returns the status for all documents in a batch document translation request. + + The documents included in the response are sorted by document Id in descending order. If the + number of documents in the response exceeds our paging limit, server-side paging is used. + Paginated responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. 
+ Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param id: Format - uuid. The operation id. + :type id: str + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DocumentStatusResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.documenttranslation.models.DocumentStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusResponse"] + error_map = { + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operation_documents_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DocumentStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = 
prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + get_operation_documents_status.metadata = {'url': '/batches/{id}/documents'} # type: ignore + + def get_document_formats( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.FileFormatListResult" + """Returns a list of supported document formats. + + The list of supported document formats supported by the Document Translation service. + The list includes the common file extension, as well as the content-type if using the upload + API. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_formats.metadata = {'url': '/documents/formats'} # type: ignore + + def get_glossary_formats( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.FileFormatListResult" + """Returns the list of supported glossary formats. + + The list of supported glossary formats supported by the Document Translation service. + The list includes the common file extension used. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_glossary_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_glossary_formats.metadata = {'url': '/glossaries/formats'} # type: ignore + + def get_document_storage_source( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageSourceListResult" + """Returns a list of supported storage sources. + + Returns a list of storage sources/options supported by the Document Translation service. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageSourceListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.StorageSourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSourceListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_storage_source.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('StorageSourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_storage_source.metadata = {'url': '/storagesources'} # type: ignore diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py new file mode 100644 index 000000000000..c9a81ef84b07 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
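+
+# Illustrative sketch only (not part of the shipped module): it shows how
+# get_authentication_policy, defined further down in this file, is expected to be
+# exercised. An AzureKeyCredential is mapped onto an AzureKeyCredentialPolicy for the
+# Ocp-Apim-Subscription-Key header, while a token credential (anything exposing
+# get_token) yields None, leaving token authentication to the generated client.
+# The "<api-key>" value is a placeholder, not a real secret.
+def _example_authentication_policy_usage():
+    # Local import keeps the sketch self-contained; nothing calls this function.
+    from azure.core.credentials import AzureKeyCredential
+
+    key_credential = AzureKeyCredential("<api-key>")
+    policy = get_authentication_policy(key_credential)
+    # policy is an AzureKeyCredentialPolicy that sends the Ocp-Apim-Subscription-Key header
+    return policy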
+# ------------------------------------ + +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline.policies import AzureKeyCredentialPolicy + +COGNITIVE_KEY_HEADER = "Ocp-Apim-Subscription-Key" + + +def get_authentication_policy(credential): + authentication_policy = None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if isinstance(credential, AzureKeyCredential): + authentication_policy = AzureKeyCredentialPolicy( + name=COGNITIVE_KEY_HEADER, credential=credential + ) + elif credential is not None and not hasattr(credential, "get_token"): + raise TypeError("Unsupported credential: {}. Use an instance of AzureKeyCredential " + "or a token credential from azure.identity".format(type(credential))) + + return authentication_policy diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py new file mode 100644 index 000000000000..e5928093c103 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -0,0 +1,235 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from typing import Any, List + + +class TranslationGlossary(object): + """Glossary / translation memory for the request. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :keyword str format: Format. + :keyword str format_version: Format version. + :keyword storage_source: Storage Source. Default value: "AzureBlob". + :paramtype storage_source: str + """ + + def __init__( + self, + glossary_url, + **kwargs + ): + # type: (str, **Any) -> None + self.glossary_url = glossary_url + self.format = kwargs.get("format", None) + self.format_version = kwargs.get("format_version", None) + self.storage_source = kwargs.get("storage_source", None) + + +class StorageTarget(object): + """Destination for the finished translated documents. + + :param target_url: Required. Location of the folder / container with your documents. + :type target_url: str + :param language: Required. Target Language. + :type language: str + :keyword str category_id: Category / custom system for translation request. + :keyword glossaries: List of TranslationGlossary. + :paramtype glossaries: Union[list[str], list[~azure.ai.documenttranslation.TranslationGlossary]] + :keyword storage_source: Storage Source. Default value: "AzureBlob". + :paramtype storage_source: str + """ + + def __init__( + self, + target_url, + language, + **kwargs + ): + # type: (str, str, **Any) -> None + self.target_url = target_url + self.language = language + self.category_id = kwargs.get("category_id", None) + self.glossaries = kwargs.get("glossaries", None) + self.storage_source = kwargs.get("storage_source", None) + + +class BatchDocumentInput(object): + """Definition for the input batch translation request. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param targets: Required. Location of the destination for the output. 
+    :type targets: list[StorageTarget]
+    :keyword str source_language: Language code.
+        If none is specified, the language is auto-detected from the document.
+    :keyword str prefix: A case-sensitive prefix string to filter documents in the source path for
+        translation. For example, when using an Azure Storage blob URI, use the prefix to restrict
+        sub folders for translation.
+    :keyword str suffix: A case-sensitive suffix string to filter documents in the source path for
+        translation. This is most often used for file extensions.
+    :keyword storage_type: Storage type of the input documents source string. Possible values
+        include: "Folder", "File".
+    :paramtype storage_type: str or ~azure.ai.documenttranslation.StorageInputType
+    :keyword str storage_source: Storage Source. Default value: "AzureBlob".
+    """
+
+    def __init__(
+        self,
+        source_url,
+        targets,
+        **kwargs
+    ):
+        # type: (str, List[StorageTarget], **Any) -> None
+        self.source_url = source_url
+        self.targets = targets
+        self.source_language = kwargs.get("source_language", None)
+        self.storage_type = kwargs.get("storage_type", None)
+        self.storage_source = kwargs.get("storage_source", None)
+        self.prefix = kwargs.get("prefix", None)
+        self.suffix = kwargs.get("suffix", None)
+
+
+class JobStatusDetail(object):
+    """Job status response.
+
+    :ivar id: Required. Id of the job.
+    :vartype id: str
+    :ivar created_on: Required. Operation created date time.
+    :vartype created_on: ~datetime.datetime
+    :ivar last_updated_on: Required. Date time when the operation's status was last
+        updated.
+    :vartype last_updated_on: ~datetime.datetime
+    :ivar status: Required. Status of the job. Possible values
+        include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling",
+        "ValidationFailed".
+    :vartype status: str
+    :ivar error: This contains an outer error with error code, message, details, target and an
+        inner error with more descriptive details.
+    :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError
+    :ivar int documents_total_count: Total count.
+    :ivar int documents_failed_count: Failed count.
+    :ivar int documents_succeeded_count: Number of successfully translated documents.
+    :ivar int documents_in_progress_count: Number of documents in progress.
+    :ivar int documents_not_yet_started_count: Number of documents not yet started.
+    :ivar int documents_cancelled_count: Number of cancelled documents.
+    :ivar int total_characters_charged: Required. Total characters charged by the API.
+
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.id = kwargs['id']
+        self.created_on = kwargs['created_on']
+        self.last_updated_on = kwargs['last_updated_on']
+        self.status = kwargs.get('status', None)
+        self.error = kwargs.get("error", None)
+        self.documents_total_count = kwargs.get('documents_total_count', None)
+        self.documents_failed_count = kwargs.get('documents_failed_count', None)
+        self.documents_succeeded_count = kwargs.get('documents_succeeded_count', None)
+        self.documents_in_progress_count = kwargs.get('documents_in_progress_count', None)
+        self.documents_not_yet_started_count = kwargs.get('documents_not_yet_started_count', None)
+        self.documents_cancelled_count = kwargs.get('documents_cancelled_count', None)
+        self.total_characters_charged = kwargs.get('total_characters_charged', None)
+
+
+class DocumentStatusDetail(object):
+    """DocumentStatusDetail.
+
+    :ivar url: Required. Location of the document or folder.
+    :vartype url: str
+    :ivar created_on: Required. Operation created date time.
+    :vartype created_on: ~datetime.datetime
+    :ivar last_updated_on: Required. Date time when the document's status was last
+        updated.
+    :vartype last_updated_on: ~datetime.datetime
+    :ivar status: Required. Status of the document. Possible values
+        include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling",
+        "ValidationFailed".
+    :vartype status: str
+    :ivar translate_to: Required. The language the document is translated to.
+    :vartype translate_to: str
+    :ivar error: This contains an outer error with error code, message, details, target and an
+        inner error with more descriptive details.
+    :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError
+    :ivar translation_progress: Progress of the translation, if available.
+    :vartype translation_progress: float
+    :ivar id: Document Id.
+    :vartype id: str
+    :ivar int characters_charged: Characters charged by the API.
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.url = kwargs['url']
+        self.created_on = kwargs['created_on']
+        self.last_updated_on = kwargs['last_updated_on']
+        self.status = kwargs['status']
+        self.translate_to = kwargs['translate_to']
+        self.error = kwargs.get('error', None)
+        self.translation_progress = kwargs.get('translation_progress', None)
+        self.id = kwargs.get('id', None)
+        self.characters_charged = kwargs.get('characters_charged', None)
+
+
+class DocumentTranslationError(object):
+    """This contains an outer error with error code, message, details, target and an
+    inner error with more descriptive details.
+
+    :ivar code: Enums containing high level error codes. Possible values include:
+        "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable",
+        "ResourceNotFound", "Unauthorized", "RequestRateTooHigh".
+    :vartype code: str
+    :ivar message: Gets high level error message.
+    :vartype message: str
+    :ivar target: Gets the source of the error.
+        For example it would be "documents" or "document id" in case of invalid document.
+    :vartype target: str
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.code = kwargs.get('code', None)
+        self.message = kwargs.get('message', None)
+        self.target = kwargs.get('target', None)
+
+
+class FileFormat(object):
+    """FileFormat.
+
+    :ivar format: Name of the format.
+    :vartype format: str
+    :ivar file_extensions: Supported file extensions for this format.
+    :vartype file_extensions: list[str]
+    :ivar content_types: Supported Content-Types for this format.
+    :vartype content_types: list[str]
+    :ivar versions: Supported versions.
+    :vartype versions: list[str]
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.format = kwargs.get('format', None)
+        self.file_extensions = kwargs.get('file_extensions', None)
+        self.content_types = kwargs.get('content_types', None)
+        self.versions = kwargs.get('versions', None)
diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py
new file mode 100644
index 000000000000..9f13f549de6a
--- /dev/null
+++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
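+
+# Illustrative sketch only (not part of the shipped code): constructing the
+# hand-written input models from _models.py above. The URLs and the glossary format
+# value are placeholders; a real request would use SAS-authenticated container URLs,
+# as shown in the samples further down in this change.
+def _example_build_batch_input():
+    from azure.ai.documenttranslation import (
+        BatchDocumentInput,
+        StorageTarget,
+        TranslationGlossary,
+    )
+
+    glossary = TranslationGlossary("<sas-url-to-glossary>", format="<glossary-format>")
+    target = StorageTarget(
+        target_url="<sas-url-to-target-container>",
+        language="es",
+        glossaries=[glossary],
+    )
+    return BatchDocumentInput(
+        source_url="<sas-url-to-source-container>",
+        targets=[target],
+        prefix="marketing",  # only translate blobs whose names start with "marketing"
+    )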
+# ------------------------------------ + +from ._version import VERSION + +USER_AGENT = "ai-translatortext/{}".format(VERSION) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py new file mode 100644 index 000000000000..3a9726d91eee --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py @@ -0,0 +1,7 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +VERSION = "1.0.0b1" diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py new file mode 100644 index 000000000000..dd05613d4fe4 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py @@ -0,0 +1,11 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from ._client_async import DocumentTranslationClient + +__all__ = [ + "DocumentTranslationClient" +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py new file mode 100644 index 000000000000..eaededf5f0ee --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py @@ -0,0 +1,169 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
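+
+# Illustrative note (not part of the shipped client): the listing helpers in this
+# module wrap the service's $top/$skip query options. Per the REST documentation
+# quoted in the generated operations, $skip is applied before $top, which on an
+# already-sorted collection is equivalent to the slicing below.
+def _example_top_skip_semantics(items, top, skip):
+    # e.g. _example_top_skip_semantics(list(range(10)), top=3, skip=5) -> [5, 6, 7]
+    return items[skip:skip + top]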
+# ------------------------------------ + +from typing import Union, Any, List, TYPE_CHECKING +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace +from azure.core.async_paging import AsyncItemPaged +from .._generated.aio import BatchDocumentTranslationClient as _BatchDocumentTranslationClient +from .._user_agent import USER_AGENT +from .._models import JobStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat +from .._helpers import get_authentication_policy +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.credentials import AzureKeyCredential + + +class DocumentTranslationClient(object): + """DocumentTranslationClient + + """ + + def __init__( + self, endpoint: str, credential: Union["AzureKeyCredential", "AsyncTokenCredential"], **kwargs: Any + ) -> None: + """ + + :param str endpoint: + :param credential: + :type credential: Union[AzureKeyCredential, AsyncTokenCredential] + :keyword str api_version: + :rtype: None + """ + self._endpoint = endpoint + self._credential = credential + self._api_version = kwargs.pop('api_version', None) + + authentication_policy = get_authentication_policy(credential) + self._client = _BatchDocumentTranslationClient( + endpoint=endpoint, + credential=credential, # type: ignore + api_version=self._api_version, + sdk_moniker=USER_AGENT, + authentication_policy=authentication_policy, + polling_interval=5, # TODO what is appropriate polling interval + **kwargs + ) + + @distributed_trace_async + async def create_translation_job(self, batch, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> JobStatusDetail + """ + + :param batch: + :type batch: List[~azure.ai.documenttranslation.BatchDocumentInput] + :return: JobStatusDetail + :rtype: JobStatusDetail + """ + + return await self._client.document_translation.begin_submit_batch_request( + inputs=batch, + polling=True, + **kwargs + ) + + @distributed_trace_async + async def get_job_status(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :rtype: ~azure.ai.documenttranslation.JobStatusDetail + """ + + return await self._client.document_translation.get_operation_status(job_id, **kwargs) + + @distributed_trace_async + async def cancel_job(self, job_id, **kwargs): + # type: (str, **Any) -> None + """ + + :param job_id: guid id for job + :type job_id: str + :rtype: None + """ + + await self._client.document_translation.cancel_operation(job_id, **kwargs) + + @distributed_trace_async + async def wait_until_done(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :return: JobStatusDetail + :rtype: JobStatusDetail + """ + pass + + @distributed_trace + def list_submitted_jobs(self, **kwargs): + # type: (**Any) -> AsyncItemPaged[JobStatusDetail] + """ + + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.polling.AsyncItemPaged[JobStatusDetail] + """ + return self._client.document_translation.get_operations(**kwargs) + + @distributed_trace + def list_documents_statuses(self, job_id, **kwargs): + # type: (str, **Any) -> AsyncItemPaged[DocumentStatusDetail] + """ + + :param job_id: guid id for job + :type job_id: str + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.paging.AsyncItemPaged[DocumentStatusDetail] + """ + + return 
self._client.document_translation.get_operation_documents_status(job_id, **kwargs) + + @distributed_trace_async + async def get_document_status(self, job_id, document_id, **kwargs): + # type: (str, str, **Any) -> DocumentStatusDetail + """ + + :param job_id: guid id for job + :type job_id: str + :param document_id: guid id for document + :type document_id: str + :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail + """ + return await self._client.document_translation.get_document_status(job_id, document_id, **kwargs) + + @distributed_trace_async + async def get_supported_storage_sources(self, **kwargs): + # type: (**Any) -> List[str] + """ + + :rtype: list[str] + """ + return await self._client.document_translation.get_document_storage_source(**kwargs) + + @distributed_trace_async + async def get_supported_glossary_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: list[FileFormat] + """ + + return await self._client.document_translation.get_glossary_formats(**kwargs) + + @distributed_trace_async + async def get_supported_document_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: list[FileFormat] + """ + + return await self._client.document_translation.get_document_formats(**kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/py.typed b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt b/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt new file mode 100644 index 000000000000..2e614298d253 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt @@ -0,0 +1,5 @@ +-e ../../../tools/azure-sdk-tools +-e ../../../tools/azure-devtools +../../core/azure-core +-e ../../identity/azure-identity +aiohttp>=3.0; python_version >= '3.5' diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py new file mode 100644 index 000000000000..5e9897646319 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
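+
+# Note: in this preview, DocumentTranslationClient.wait_until_done above is still a
+# stub. The helper below is an illustrative sketch (not part of the SDK) of how a
+# sample could poll get_job_status itself until the job reaches a terminal state;
+# the 10-second interval and the status strings mirror the JobStatusDetail docstring.
+async def wait_for_job_completion(client, job_id, polling_interval=10):
+    import asyncio
+    terminal_states = {"Succeeded", "Failed", "Cancelled", "ValidationFailed"}
+    job = await client.get_job_status(job_id)
+    while job.status not in terminal_states:
+        await asyncio.sleep(polling_interval)
+        job = await client.get_job_status(job_id)
+    return job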
+# ------------------------------------ + +import os +import asyncio + + +class BatchTranslationSampleAsync(object): + + async def batch_translation_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url_en = os.environ["AZURE_SOURCE_CONTAINER_URL_EN"] + source_container_url_de = os.environ["AZURE_SOURCE_CONTAINER_URL_DE"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + # create service client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url_en, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ), + BatchDocumentInput( + source_url=source_container_url_de, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ) + ] + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) # type: JobStatusDetail + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + # get job result + job_result = await client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(client, job_result.id) + exit(1) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = BatchTranslationSampleAsync() + await sample.batch_translation_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py new file mode 100644 index 
000000000000..fa8af0255101 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py @@ -0,0 +1,142 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +import os +import asyncio + + +class BatchTranslationWithStorageSampleAsync(object): + + async def batch_translation_with_storage_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + from azure.storage.blob.aio import ContainerClient + from azure.storage.blob import ( + generate_container_sas, + ContainerSasPermissions + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_storage_endpoint = os.environ["AZURE_STORAGE_SOURCE_ENDPOINT"] + source_storage_account_name = os.environ["AZURE_STORAGE_SOURCE_ACCOUNT_NAME"] + source_storage_container_name = os.environ["AZURE_STORAGE_SOURCE_CONTAINER_NAME"] + source_storage_key = os.environ["AZURE_STORAGE_SOURCE_KEY"] + target_storage_endpoint = os.environ["AZURE_STORAGE_TARGET_ENDPOINT"] + target_storage_account_name = os.environ["AZURE_STORAGE_TARGET_ACCOUNT_NAME"] + target_storage_container_name = os.environ["AZURE_STORAGE_TARGET_CONTAINER_NAME"] + target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"] + + # create service clients + translation_client = DocumentTranslationClient( + endpoint, AzureKeyCredential(key) + ) + + container_client = ContainerClient( + source_storage_endpoint, + container_name=source_storage_container_name, + credential=source_storage_key + ) + + # upload some document for translation + with open("document.txt", "rb") as doc: + await container_client.upload_blob(name="document.txt", data=doc) + + # prepare translation job input + source_container_sas = generate_container_sas( + account_name=source_storage_account_name, + container_name=source_storage_container_name, + account_key=source_storage_key, + permission=ContainerSasPermissions.from_string("rl") + ) + + target_container_sas = generate_container_sas( + account_name=target_storage_account_name, + container_name=target_storage_container_name, + account_key=target_storage_key, + permission=ContainerSasPermissions.from_string("rlwd") + ) + + source_container_url = source_storage_endpoint + "/" + source_storage_container_name + "?" + source_container_sas + target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url, + language="es" + ) + ], + prefix="document" + ) + ] + + # run job + async with translation_client: + job_detail = await translation_client.create_translation_job(batch) + job_result = await translation_client.wait_until_done(job_detail.id) + + # poll status result + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(translation_client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(translation_client, job_result.id) + exit(1) + + # store result documents + container_client = ContainerClient( + target_storage_endpoint, + container_name=target_storage_container_name, + credential=target_storage_key + ) + + with open("translated.txt", "wb") as my_blob: + download_stream = await container_client.download_blob("document.txt") + my_blob.write(await download_stream.readall()) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + +async def main(): + sample = BatchTranslationWithStorageSampleAsync() + await sample.batch_translation_with_storage_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py new file mode 100644 index 000000000000..2cb61c61e33d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +import os +import asyncio + +class CancelTranslationJobSampleAsync(object): + + async def cancel_translation_job_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ) + ], + storage_type="file" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run job + async with client: + job_detail = await client.create_translation_job(batch) + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + await client.cancel_job(job_detail.id) + job_detail = await client.get_job_status(job_detail.id) # type: JobStatusDetail + + if job_detail.status in ["Cancelled", "Cancelling"]: + print("We cancelled job with ID: {}".format(job_detail.id)) + + +async def main(): + sample = CancelTranslationJobSampleAsync() + await sample.cancel_translation_job_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) + + diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py new file mode 100644 index 000000000000..6d0ec8d2d815 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +import os +import asyncio + +class CustomTranslationSampleAsync(object): + + async def custom_translation_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + category_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_fr, + language="fr", + category_id=category_id, + glossaries=["https://exampleglossary"] + ) + ], + prefix="document_2021" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + job_result = await client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(client, job_result.id) + exit(1) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = CustomTranslationSampleAsync() + await sample.custom_translation_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py new file mode 100644 index 000000000000..ecb983cfe869 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------
+
+import os
+import asyncio
+
+class ListAllSubmittedJobsSampleAsync(object):
+
+    async def list_all_submitted_jobs(self):
+        # import libraries
+        from azure.core.credentials import AzureKeyCredential
+        from azure.ai.documenttranslation.aio import DocumentTranslationClient
+
+        # get service secrets
+        endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"]
+        key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"]
+
+        # create translation client
+        client = DocumentTranslationClient(endpoint, AzureKeyCredential(key))
+
+        # list submitted jobs
+        jobs = client.list_submitted_jobs() # type: AsyncItemPaged[JobStatusDetail]
+
+        async for job in jobs:
+            # wait for job to finish
+            if job.status in ["NotStarted", "Running"]:
+                job = await client.wait_until_done(job.id)
+
+            print("Job ID: {}".format(job.id))
+            print("Job status: {}".format(job.status))
+            print("Job created on: {}".format(job.created_on))
+            print("Job last updated on: {}".format(job.last_updated_on))
+            print("Total number of translations on documents: {}".format(job.documents_total_count))
+            print("Total number of characters charged: {}".format(job.total_characters_charged))
+
+            print("Of total documents...")
+            print("{} failed".format(job.documents_failed_count))
+            print("{} succeeded".format(job.documents_succeeded_count))
+            print("{} in progress".format(job.documents_in_progress_count))
+            print("{} not yet started".format(job.documents_not_yet_started_count))
+            print("{} cancelled".format(job.documents_cancelled_count))
+
+
+async def main():
+    sample = ListAllSubmittedJobsSampleAsync()
+    await sample.list_all_submitted_jobs()
+
+
+if __name__ == '__main__':
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(main())
\ No newline at end of file
diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py
new file mode 100644
index 000000000000..3e0dcef5b8bb
--- /dev/null
+++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py
@@ -0,0 +1,103 @@
+# coding=utf-8
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------ + +import os +import asyncio +import time + +class TranslationStatusChecksSampleAsync(object): + + async def translation_status_checks_async(self): + + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + # prepare translation input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ], + storage_type="folder", + prefix="document_2021" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) + while True: + job_detail = await client.get_job_status(job_detail.id) # type: JobStatusDetail + if job_detail.status in ["NotStarted", "Running"]: + await asyncio.sleep(30) + continue + + elif job_detail.status in ["Failed", "ValidationFailed"]: + if job_detail.error: + print("Translation job failed: {}: {}".format(job_detail.error.code, job_detail.error.message)) + await self.check_documents(client, job_detail.id) + exit(1) + + elif job_detail.status == "Succeeded": + print("We translated our documents!") + if job_detail.documents_failed_count > 0: + await self.check_documents(client, job_detail.id) + break + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = TranslationStatusChecksSampleAsync() + await sample.translation_status_checks_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py new file mode 100644 index 000000000000..14bdd62b2d5b --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_batch_translation(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageTarget + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url_en = os.environ["AZURE_SOURCE_CONTAINER_URL_EN"] + source_container_url_de = os.environ["AZURE_SOURCE_CONTAINER_URL_DE"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source_url=source_container_url_en, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ), + BatchDocumentInput( + source_url=source_container_url_de, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ) + ] + + job_detail = client.create_translation_job(batch) # type: JobStatusDetail + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + job_result = client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(client, job_result.id) + exit(1) + + +def check_documents(client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_batch_translation() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py new file mode 100644 index 000000000000..d0968c411378 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_batch_translation_with_storage(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageTarget + ) + from azure.storage.blob import ContainerClient, generate_container_sas, ContainerSasPermissions + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_storage_endpoint = os.environ["AZURE_STORAGE_SOURCE_ENDPOINT"] + source_storage_account_name = os.environ["AZURE_STORAGE_SOURCE_ACCOUNT_NAME"] + source_storage_container_name = os.environ["AZURE_STORAGE_SOURCE_CONTAINER_NAME"] + source_storage_key = os.environ["AZURE_STORAGE_SOURCE_KEY"] + target_storage_endpoint = os.environ["AZURE_STORAGE_TARGET_ENDPOINT"] + target_storage_account_name = os.environ["AZURE_STORAGE_TARGET_ACCOUNT_NAME"] + target_storage_container_name = os.environ["AZURE_STORAGE_TARGET_CONTAINER_NAME"] + target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"] + + translation_client = DocumentTranslationClient( + endpoint, AzureKeyCredential(key) + ) + + container_client = ContainerClient( + source_storage_endpoint, + container_name=source_storage_container_name, + credential=source_storage_key + ) + + with open("document.txt", "rb") as doc: + container_client.upload_blob("document.txt", doc) + + source_container_sas = generate_container_sas( + account_name=source_storage_account_name, + container_name=source_storage_container_name, + account_key=source_storage_key, + permission=ContainerSasPermissions.from_string("rl") + ) + + target_container_sas = generate_container_sas( + account_name=target_storage_account_name, + container_name=target_storage_container_name, + account_key=target_storage_key, + permission=ContainerSasPermissions.from_string("rlwd") + ) + + source_container_url = source_storage_endpoint + "/" + source_storage_container_name + "?" + source_container_sas + target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url, + language="es" + ) + ], + prefix="document" + ) + ] + + job_detail = translation_client.create_translation_job(batch) + job_result = translation_client.wait_until_done(job_detail.id) + + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + check_documents(translation_client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(translation_client, job_result.id) + exit(1) + + container_client = ContainerClient( + target_storage_endpoint, + container_name=target_storage_container_name, + credential=target_storage_key + ) + + target_container_client = container_client.from_container_url(target_container_url) + + with open("translated.txt", "wb") as my_blob: + download_stream = target_container_client.download_blob("document.txt") + my_blob.write(download_stream.readall()) + + +def check_documents(client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_batch_translation_with_storage() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py new file mode 100644 index 000000000000..4ed96a711b31 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_cancel_translation_job(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageTarget + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ) + ], + storage_type="file" + ) + ] + + job_detail = client.create_translation_job(batch) # type: JobStatusDetail + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + client.cancel_job(job_detail.id) + job_detail = client.get_job_status(job_detail.id) # type: JobStatusDetail + + if job_detail.status in ["Cancelled", "Cancelling"]: + print("We cancelled job with ID: {}".format(job_detail.id)) + + +if __name__ == '__main__': + sample_cancel_translation_job() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py new file mode 100644 index 000000000000..30321928eaca --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -0,0 +1,81 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_custom_translation(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageTarget + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + category_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_fr, + language="fr", + category_id=category_id, + glossaries=["https://exampleglossary"] + ) + ], + prefix="document_2021" + ) + ] + + job_detail = client.create_translation_job(batch) # type: JobStatusDetail + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + job_result = client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(client, job_result.id) + exit(1) + + +def check_documents(client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_custom_translation() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py new file mode 100644 index 000000000000..d80d3de8a52d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_list_all_submitted_jobs(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + jobs = client.list_submitted_jobs() # type: ItemPaged[JobStatusDetail] + + for job in jobs: + if job.status in ["NotStarted", "Running"]: + job = client.wait_until_done(job.id) + + print("Job ID: {}".format(job.id)) + print("Job status: {}".format(job.status)) + print("Job created on: {}".format(job.created_on)) + print("Job last updated on: {}".format(job.last_updated_on)) + print("Total number of translations on documents: {}".format(job.documents_total_count)) + print("Total number of characters charged: {}".format(job.total_characters_charged)) + + print("Of total documents...") + print("{} failed".format(job.documents_failed_count)) + print("{} succeeded".format(job.documents_succeeded_count)) + print("{} in progress".format(job.documents_in_progress_count)) + print("{} not yet started".format(job.documents_not_yet_started_count)) + print("{} cancelled".format(job.documents_cancelled_count)) + + +if __name__ == '__main__': + sample_list_all_submitted_jobs() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_translation_status_checks.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_translation_status_checks.py new file mode 100644 index 000000000000..596b373db915 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_translation_status_checks.py @@ -0,0 +1,88 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_translation_status_checks(): + import os + import time + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageTarget + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ], + storage_type="folder", + prefix="document_2021" + ) + ] + + job_detail = client.create_translation_job(batch) + + while True: + job_detail = client.get_job_status(job_detail.id) # type: JobStatusDetail + if job_detail.status in ["NotStarted", "Running"]: + time.sleep(30) + continue + + elif job_detail.status in ["Failed", "ValidationFailed"]: + if job_detail.error: + print("Translation job failed: {}: {}".format(job_detail.error.code, job_detail.error.message)) + check_documents(client, job_detail.id) + exit(1) + + elif job_detail.status == "Succeeded": + print("We translated our documents!") + if job_detail.documents_failed_count > 0: + check_documents(client, job_detail.id) + break + + +def check_documents(client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_translation_status_checks() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml b/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml new file mode 100644 index 000000000000..901bc8ccbfa6 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml @@ -0,0 +1,2 @@ +[packaging] +auto_update = false diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg b/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg new file mode 100644 index 000000000000..3c6e79cf31da --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/setup.py b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py new file mode 100644 index 000000000000..d57b8bf7055b --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py @@ -0,0 +1,70 @@ +from setuptools import setup, find_packages +import os +from io import open +import re + +PACKAGE_NAME = 
"azure-ai-documenttranslation" +PACKAGE_PPRINT_NAME = "Document Translation Package" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace('-', '/') +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace('-', '.') + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) +if not version: + raise RuntimeError('Cannot find version information') + +with open('README.md', encoding='utf-8') as f: + long_description = f.read() + +setup( + name=PACKAGE_NAME, + version=version, + description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), + + # ensure that these are updated to reflect the package owners' information + long_description=long_description, + url='https://github.com/Azure/azure-sdk-for-python', + author='Microsoft Corporation', + author_email='azuresdkengsysadmins@microsoft.com', + + license='MIT License', + # ensure that the development status reflects the status of your package + classifiers=[ + "Development Status :: 4 - Beta", + + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'License :: OSI Approved :: MIT License', + ], + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + 'azure.ai', + ]), + install_requires=[ + "azure-core<2.0.0,>=1.8.2", + "msrest>=0.6.21", + 'six>=1.11.0', + 'azure-common~=1.1', + ], + extras_require={ + ":python_version<'3.0'": ['azure-ai-nspkg'], + ":python_version<'3.5'": ['typing'], + }, + project_urls={ + 'Bug Reports': 'https://github.com/Azure/azure-sdk-for-python/issues', + 'Source': 'https://github.com/Azure/azure-sdk-python', + } +) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py b/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py new file mode 100644 index 000000000000..fad27bf990b8 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from devtools_testutils import ( + AzureTestCase, +) + +from azure_devtools.scenario_tests import ( + ReplayableTest +) + + +class DocumentTranslationTest(AzureTestCase): + FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key'] + + def __init__(self, method_name): + super(DocumentTranslationTest, self).__init__(method_name) + self.vcr.match_on = ["path", "method", "query"] + diff --git a/sdk/documenttranslation/ci.yml b/sdk/documenttranslation/ci.yml new file mode 100644 index 000000000000..feba1d074712 --- /dev/null +++ b/sdk/documenttranslation/ci.yml @@ -0,0 +1,32 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file. 
+
+trigger:
+  branches:
+    include:
+    - master
+    - hotfix/*
+    - release/*
+    - restapi*
+  paths:
+    include:
+    - sdk/documenttranslation/
+
+pr:
+  branches:
+    include:
+    - master
+    - feature/*
+    - hotfix/*
+    - release/*
+    - restapi*
+  paths:
+    include:
+    - sdk/documenttranslation/
+
+extends:
+  template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml
+  parameters:
+    ServiceDirectory: documenttranslation
+    Artifacts:
+    - name: azure_ai_documenttranslation
+      safeName: azureaidocumenttranslation
\ No newline at end of file
diff --git a/sdk/documenttranslation/tests.yml b/sdk/documenttranslation/tests.yml
new file mode 100644
index 000000000000..6d13570bfb33
--- /dev/null
+++ b/sdk/documenttranslation/tests.yml
@@ -0,0 +1,13 @@
+trigger: none
+
+jobs:
+  - template: ../../eng/pipelines/templates/jobs/archetype-sdk-tests.yml
+    parameters:
+      AllocateResourceGroup: 'false'
+      BuildTargetingString: azure-ai-documenttranslation
+      ServiceDirectory: documenttranslation
+      EnvVars:
+        AZURE_CLIENT_ID: $(aad-azure-sdk-test-client-id)
+        AZURE_CLIENT_SECRET: $(aad-azure-sdk-test-client-secret)
+        AZURE_TENANT_ID: $(aad-azure-sdk-test-tenant-id)
+        TEST_MODE: 'RunLiveNoRecord'
\ No newline at end of file
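
Note: the samples in this diff authenticate only with AzureKeyCredential, while tests.yml above injects AAD service-principal variables (AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, AZURE_TENANT_ID) for live runs. The following is a rough sketch of how a live test might turn those variables into a client; it assumes DocumentTranslationClient also accepts an azure-identity token credential, which this diff does not confirm.

# Hypothetical sketch only: builds a client from the AAD variables that tests.yml injects.
# DocumentTranslationClient is shown with AzureKeyCredential elsewhere in this diff;
# token-credential support is an assumption here, not something this diff confirms.
import os

from azure.identity import ClientSecretCredential
from azure.ai.documenttranslation import DocumentTranslationClient

endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"]

# Variable names match the EnvVars block in tests.yml.
credential = ClientSecretCredential(
    tenant_id=os.environ["AZURE_TENANT_ID"],
    client_id=os.environ["AZURE_CLIENT_ID"],
    client_secret=os.environ["AZURE_CLIENT_SECRET"],
)

client = DocumentTranslationClient(endpoint, credential)
for job in client.list_submitted_jobs():
    print("{}: {}".format(job.id, job.status))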