From e4898c16896f24dcb6440df3d74c20dc69634622 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Fri, 19 Feb 2021 17:02:46 -0800 Subject: [PATCH 1/8] initial commit --- .../azure-ai-documenttranslation/CHANGELOG.md | 3 + .../azure-ai-documenttranslation/MANIFEST.in | 5 + .../azure-ai-documenttranslation/README.md | 35 + .../azure/__init__.py | 1 + .../azure/ai/__init__.py | 1 + .../azure/ai/documenttranslation/__init__.py | 41 + .../ai/documenttranslation/_api_version.py | 28 + .../azure/ai/documenttranslation/_client.py | 160 ++++ .../_generated/__init__.py | 16 + .../_batch_document_translation_client.py | 67 ++ .../_generated/_configuration.py | 68 ++ .../_generated/aio/__init__.py | 10 + .../aio/_batch_document_translation_client.py | 61 ++ .../_generated/aio/_configuration.py | 64 ++ .../_generated/aio/operations/__init__.py | 13 + .../_document_translation_operations.py | 751 +++++++++++++++++ .../_generated/models/__init__.py | 75 ++ ...batch_document_translation_client_enums.py | 64 ++ .../_generated/models/_models.py | 650 +++++++++++++++ .../_generated/models/_models_py3.py | 730 +++++++++++++++++ .../_generated/operations/__init__.py | 13 + .../_document_translation_operations.py | 765 ++++++++++++++++++ .../documenttranslation/_generated/py.typed | 1 + .../azure/ai/documenttranslation/_helpers.py | 25 + .../azure/ai/documenttranslation/_models.py | 273 +++++++ .../ai/documenttranslation/_user_agent.py | 9 + .../azure/ai/documenttranslation/_version.py | 7 + .../ai/documenttranslation/aio/__init__.py | 11 + .../documenttranslation/aio/_client_async.py | 162 ++++ .../azure/ai/documenttranslation/py.typed | 0 .../dev_requirements.txt | 5 + .../samples/sample_batch_translation.py | 74 ++ .../samples/sample_cancel_batch.py | 53 ++ .../samples/sample_check_statuses.py | 80 ++ .../samples/sample_custom_translation.py | 69 ++ .../samples/sample_list_batches.py | 34 + .../sdk_packaging.toml | 2 + .../azure-ai-documenttranslation/setup.cfg | 2 + .../azure-ai-documenttranslation/setup.py | 71 ++ .../tests/testcase.py | 22 + sdk/documenttranslation/ci.yml | 32 + sdk/documenttranslation/tests.yml | 13 + 42 files changed, 4566 insertions(+) create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/README.md create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py create mode 100644 
sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/py.typed create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/setup.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py create mode 100644 sdk/documenttranslation/ci.yml create mode 100644 sdk/documenttranslation/tests.yml diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md 
b/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md new file mode 100644 index 000000000000..332564950c28 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/CHANGELOG.md @@ -0,0 +1,3 @@ +# Release History + +## 1.0.0b1 (Unreleased) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in b/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in new file mode 100644 index 000000000000..ded513877297 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/MANIFEST.in @@ -0,0 +1,5 @@ +recursive-include tests *.py +recursive-include samples *.py *.md +include *.md +include azure/__init__.py +include azure/ai/__init__.py diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/README.md b/sdk/documenttranslation/azure-ai-documenttranslation/README.md new file mode 100644 index 000000000000..92c7ca73c592 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/README.md @@ -0,0 +1,35 @@ +[![Build Status](https://dev.azure.com/azure-sdk/public/_apis/build/status/azure-sdk-for-python.client?branchName=master)](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=46?branchName=master) + +# Azure Template Package client library for Python + +This template package matches necessary patterns that the development team has established to create a unified sdk functional from Python 2.7 onwards. The packages contained herein can be installed singly or as part of the `azure` namespace. Any other introductory text should go here. + +This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8 + +For a more complete set of Azure libraries, see https://aka.ms/azsdk/python/all + +# Getting started + +For a rich example of a well formatted readme, please check [here.](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-TEMPLATE.md) In addition, this is an [example readme](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-EXAMPLE.md) that should be emulated. Note that the top-level sections in this template align with that of the [template.](https://github.com/Azure/azure-sdk/blob/master/docs/policies/README-TEMPLATE.md) + +# Key concepts + +Bullet point list of your library's main concepts. + +# Examples + +Examples of some of the key concepts for your library. + +# Troubleshooting + +Running into issues? This section should contain details as to what to do there. + +# Next steps + +More sample code should go here, along with links out to the appropriate example tests. + +# Contributing + +If you encounter any bugs or have suggestions, please file an issue in the [Issues]() section of the project. 
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fsdk%2Ftemplate%2Fazure-template%2FREADME.png) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py new file mode 100644 index 000000000000..5960c353a898 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py new file mode 100644 index 000000000000..5960c353a898 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py new file mode 100644 index 000000000000..a439cb77b539 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from ._version import VERSION +from ._client import DocumentTranslationClient +from ._generated.models import ( + StorageInputType, +) +from ._api_version import DocumentTranslationVersion +from ._models import ( + StorageSourceInput, + StorageTargetInput, + BatchStatusDetail, + DocumentStatusDetail, + DocumentTranslationError, + TranslationGlossary, + BatchDocumentInput, + StatusSummary, + FileFormat +) + +__VERSION__ = VERSION + + +__all__ = [ + "DocumentTranslationClient", + "DocumentTranslationVersion", + "BatchDocumentInput", + "TranslationGlossary", + "StorageInputType", + "StatusSummary", + "FileFormat", + "StorageSourceInput", + "StorageTargetInput", + "BatchStatusDetail", + "DocumentStatusDetail", + "DocumentTranslationError" +] \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py new file mode 100644 index 000000000000..7ef6fa8c09dd --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py @@ -0,0 +1,28 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from enum import Enum + + +class DocumentTranslationVersion(str, Enum): + """Document Translation API versions supported by this package""" + + #: This is the default version + V1_1_PREVIEW = "1.0-preview.1" + + +def validate_api_version(api_version): + # type: (str) -> None + """Raise ValueError if api_version is invalid """ + if not api_version: + return + + try: + api_version = DocumentTranslationVersion(api_version) + except ValueError: + raise ValueError( + "Unsupported API version '{}'. 
Please select from:\n{}".format( + api_version, ", ".join(v.value for v in DocumentTranslationVersion)) + ) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py new file mode 100644 index 000000000000..18c52d99c19f --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py @@ -0,0 +1,160 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from typing import Union, Any, TYPE_CHECKING, List +from azure.core.polling import LROPoller +from azure.core.tracing.decorator import distributed_trace +from ._generated import BatchDocumentTranslationClient as _BatchDocumentTranslationClient +from ._helpers import get_authentication_policy +from ._user_agent import USER_AGENT +if TYPE_CHECKING: + from azure.core.paging import ItemPaged + from azure.core.credentials import AzureKeyCredential, TokenCredential + from ._models import BatchStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat + + +class DocumentTranslationClient(object): + """DocumentTranslationClient + + """ + + def __init__(self, endpoint, credential, **kwargs): + # type: (str, Union[AzureKeyCredential, TokenCredential], **Any) -> None + """ + + :param str endpoint: + :param credential: + :type credential: Union[AzureKeyCredential, TokenCredential] + :keyword str api_version: + """ + self._endpoint = endpoint + self._credential = credential + self._api_version = kwargs.pop('api_version', None) + + authentication_policy = get_authentication_policy(credential) + self._client = _BatchDocumentTranslationClient( + endpoint=endpoint, + credential=credential, # type: ignore + api_version=self._api_version, + sdk_moniker=USER_AGENT, + authentication_policy=authentication_policy, + polling_interval=5, # TODO what is appropriate polling interval + **kwargs + ) + + @distributed_trace + def begin_batch_translation(self, inputs, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> LROPoller[BatchStatusDetail] + """ + + :param inputs: + :type inputs: List[~azure.ai.documenttranslation.BatchDocumentInput] + :return: LROPoller + :rtype: ~azure.core.polling.LROPoller[BatchStatusDetail] + """ + + return self._client.document_translation.begin_submit_batch_request( + inputs=inputs, + polling=True, + **kwargs + ) + + @distributed_trace + def get_batch_status(self, batch_id, **kwargs): + # type: (Union[LROPoller, str], **Any) -> BatchStatusDetail + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.LROPoller, str] + :rtype: ~azure.ai.documenttranslation.BatchStatusDetail + """ + if isinstance(batch_id, LROPoller): + batch_id = batch_id.batch_id + + return self._client.document_translation.get_operation_status(batch_id, **kwargs) + + @distributed_trace + def cancel_batch(self, batch_id, **kwargs): + # type: (Union[LROPoller, str], **Any) -> None + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.LROPoller, str] + :rtype: None + """ + if isinstance(batch_id, LROPoller): + batch_id = batch_id.batch_id + + self._client.document_translation.cancel_operation(batch_id, **kwargs) + + @distributed_trace + def list_batches(self, **kwargs): + # type: (**Any) -> ItemPaged[BatchStatusDetail] + """ + + :keyword int 
results_per_page: + :keyword int skip: + :rtype: ~azure.core.polling.ItemPaged[BatchStatusDetail] + """ + return self._client.document_translation.get_operations(**kwargs) + + @distributed_trace + def list_documents_statuses(self, batch_id, **kwargs): + # type: (Union[LROPoller, str], **Any) -> ItemPaged[DocumentStatusDetail] + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.LROPoller, str] + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.paging.ItemPaged[DocumentStatusDetail] + """ + if isinstance(batch_id, LROPoller): + batch_id = batch_id.batch_id + + return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) + + @distributed_trace + def get_document_status(self, batch_id, document_id, **kwargs): + # type: (Union[LROPoller, str], str, **Any) -> DocumentStatusDetail + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.LROPoller, str] + :param document_id: guid id for document + :type document_id: str + :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail + """ + return self._client.document_translation.get_document_status(batch_id, document_id, **kwargs) + + @distributed_trace + def get_supported_storage_sources(self, **kwargs): + # type: (**Any) -> List[str] + """ + + :rtype: List[str] + """ + return self._client.document_translation.get_document_storage_source(**kwargs) + + @distributed_trace + def get_supported_glossary_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: List[FileFormat] + """ + + return self._client.document_translation.get_glossary_formats(**kwargs) + + @distributed_trace + def get_supported_document_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: List[FileFormat] + """ + + return self._client.document_translation.get_document_formats(**kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py new file mode 100644 index 000000000000..5a2929b79fb0 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
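The handwritten DocumentTranslationClient above mostly forwards to the generated operations. A minimal usage sketch, assuming a Translator endpoint and key in environment variables; the BatchDocumentInput / StorageTargetInput keyword arguments shown are illustrative only, since their exact signatures live in _models.py:

```python
import os
from azure.core.credentials import AzureKeyCredential
from azure.ai.documenttranslation import (
    DocumentTranslationClient,
    BatchDocumentInput,
    StorageTargetInput,
)

client = DocumentTranslationClient(
    endpoint=os.environ["DOCUMENT_TRANSLATION_ENDPOINT"],
    credential=AzureKeyCredential(os.environ["DOCUMENT_TRANSLATION_KEY"]),
    # api_version is optional; see DocumentTranslationVersion in _api_version.py
)

poller = client.begin_batch_translation(
    inputs=[
        BatchDocumentInput(                            # keyword names are assumptions
            source_url="<sas-url-to-source-container>",
            targets=[
                StorageTargetInput(
                    target_url="<sas-url-to-target-container>",
                    language="es",
                )
            ],
        )
    ]
)
poller.wait()

# List submitted batches and their statuses afterwards.
for batch in client.list_batches():
    print(batch.id, batch.status)                      # attribute names assumed
```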
+# -------------------------------------------------------------------------- + +from ._batch_document_translation_client import BatchDocumentTranslationClient +__all__ = ['BatchDocumentTranslationClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py new file mode 100644 index 000000000000..2d807beae995 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_batch_document_translation_client.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core import PipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +from ._configuration import BatchDocumentTranslationClientConfiguration +from .operations import DocumentTranslationOperations +from . import models + + +class BatchDocumentTranslationClient(object): + """BatchDocumentTranslationClient. + + :ivar document_translation: DocumentTranslationOperations operations + :vartype document_translation: azure.ai.documenttranslation.operations.DocumentTranslationOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + endpoint, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + base_url = '{endpoint}/translator/text/batch/v1.0-preview.1' + self._config = BatchDocumentTranslationClientConfiguration(credential, endpoint, **kwargs) + self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.document_translation = DocumentTranslationOperations( + self._client, self._config, self._serialize, self._deserialize) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> BatchDocumentTranslationClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py new file mode 100644 index 000000000000..159aa8c93a94 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/_configuration.py @@ -0,0 +1,68 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +VERSION = "unknown" + +class BatchDocumentTranslationClientConfiguration(Configuration): + """Configuration for BatchDocumentTranslationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + endpoint, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + super(BatchDocumentTranslationClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.endpoint = endpoint + self.credential_scopes = kwargs.pop('credential_scopes', ['https://cognitiveservices.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'ai-documenttranslation/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) 
-> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py new file mode 100644 index 000000000000..05bf6f3f777c --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._batch_document_translation_client import BatchDocumentTranslationClient +__all__ = ['BatchDocumentTranslationClient'] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py new file mode 100644 index 000000000000..6fcca5e7db28 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_batch_document_translation_client.py @@ -0,0 +1,61 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core import AsyncPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import BatchDocumentTranslationClientConfiguration +from .operations import DocumentTranslationOperations +from .. import models + + +class BatchDocumentTranslationClient(object): + """BatchDocumentTranslationClient. 
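The _configure() method above resolves every pipeline policy from kwargs before falling back to a default, and only builds a BearerTokenCredentialPolicy when no authentication_policy is supplied. A sketch of overriding these knobs on the generated sync client; DefaultAzureCredential is an assumption here, any TokenCredential works:

```python
from azure.core.pipeline import policies
from azure.identity import DefaultAzureCredential
from azure.ai.documenttranslation._generated import BatchDocumentTranslationClient

client = BatchDocumentTranslationClient(
    credential=DefaultAzureCredential(),
    endpoint="https://<resource-name>.cognitiveservices.azure.com",
    # Every keyword below is optional; anything omitted falls back to the
    # defaults built in _configure().
    retry_policy=policies.RetryPolicy(retry_total=3),
    logging_policy=policies.NetworkTraceLoggingPolicy(logging_enable=True),
    # credential_scopes defaults to ['https://cognitiveservices.azure.com/.default'].
)
```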
+ + :ivar document_translation: DocumentTranslationOperations operations + :vartype document_translation: azure.ai.documenttranslation.aio.operations.DocumentTranslationOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). + :type endpoint: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + endpoint: str, + **kwargs: Any + ) -> None: + base_url = '{endpoint}/translator/text/batch/v1.0-preview.1' + self._config = BatchDocumentTranslationClientConfiguration(credential, endpoint, **kwargs) + self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.document_translation = DocumentTranslationOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "BatchDocumentTranslationClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py new file mode 100644 index 000000000000..f90f307734cb --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/_configuration.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +VERSION = "unknown" + +class BatchDocumentTranslationClientConfiguration(Configuration): + """Configuration for BatchDocumentTranslationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com). 
+ :type endpoint: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + endpoint: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + super(BatchDocumentTranslationClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.endpoint = endpoint + self.credential_scopes = kwargs.pop('credential_scopes', ['https://cognitiveservices.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'ai-documenttranslation/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py new file mode 100644 index 000000000000..e524e2215fb7 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._document_translation_operations import DocumentTranslationOperations + +__all__ = [ + 'DocumentTranslationOperations', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py new file mode 100644 index 000000000000..b8fd6dc69d8c --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/aio/operations/_document_translation_operations.py @@ -0,0 +1,751 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
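The aio client mirrors the sync surface and supports async context management via the __aenter__/__aexit__/close methods defined above. A sketch of async usage; azure.identity.aio is an assumption, any AsyncTokenCredential works:

```python
import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.ai.documenttranslation._generated.aio import BatchDocumentTranslationClient

async def main():
    credential = DefaultAzureCredential()
    async with BatchDocumentTranslationClient(
        credential=credential,
        endpoint="https://<resource-name>.cognitiveservices.azure.com",
    ) as client:
        # get_document_formats is defined later in the aio operations below.
        formats = await client.document_translation.get_document_formats()
        print(formats)
    await credential.close()

asyncio.run(main())
```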
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.polling.async_base_polling import AsyncLROBasePolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DocumentTranslationOperations: + """DocumentTranslationOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.ai.documenttranslation.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _submit_batch_request_initial( + self, + inputs: List["_models.BatchRequest"], + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + + _body = _models.BatchSubmissionRequest(inputs=inputs) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._submit_batch_request_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: 
Dict[str, Any] + if _body is not None: + body_content = self._serialize.body(_body, 'BatchSubmissionRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Operation-Location']=self._deserialize('str', response.headers.get('Operation-Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _submit_batch_request_initial.metadata = {'url': '/batches'} # type: ignore + + async def begin_submit_batch_request( + self, + inputs: List["_models.BatchRequest"], + **kwargs + ) -> AsyncLROPoller[None]: + """Submit a document translation request to the Document Translation service. + + Use this API to submit a bulk (batch) translation request to the Document Translation service. + Each request can contain multiple documents and must contain a source and destination container + for each document. + + The prefix and suffix filter (if supplied) are used to filter folders. The prefix is applied to + the subpath after the container name. + + Glossaries / Translation memory can be included in the request and are applied by the service + when the document is translated. + + If the glossary is invalid or unreachable during translation, an error is indicated in the + document status. + If a file with the same name already exists at the destination, it will be overwritten. The + targetUrl for each target language must be unique. + + :param inputs: The input list of documents or folders containing documents. + :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the AsyncLROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._submit_batch_request_initial( + inputs=inputs, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + + if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_submit_batch_request.metadata = {'url': '/batches'} # type: ignore + + def get_operations( + self, + top: Optional[int] = 50, + skip: Optional[int] = 0, + **kwargs + ) -> AsyncIterable["_models.BatchStatusResponse"]: + """Returns a list of batch requests submitted and the status for each request. + + Returns a list of batch requests submitted and the status for each request. + This list only contains batch requests submitted by the user (based on the subscription). The + status for each request is sorted by id. + + If the number of requests exceeds our paging limit, server-side paging is used. Paginated + responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. + Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. 
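Note that this generated method defaults polling to False (AsyncNoPolling), so callers pass polling=True or their own polling object, and the continuation_token branch above allows resuming a saved poller. A sketch, reusing the aio client and a list of BatchRequest models (`inputs`) built elsewhere:

```python
poller = await client.document_translation.begin_submit_batch_request(
    inputs=inputs,
    polling=True,                      # default here is False, i.e. no polling
)
token = poller.continuation_token()    # persist this to resume the operation later

resumed = await client.document_translation.begin_submit_batch_request(
    inputs=inputs,                     # the signature still requires inputs when resuming
    continuation_token=token,
)
await resumed.wait()
```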
+ :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchStatusResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.documenttranslation.models.BatchStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusResponse"] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operations.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('BatchStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + get_operations.metadata = {'url': '/batches'} # type: ignore + + async def get_document_status( + self, + id: str, + document_id: str, + **kwargs + ) -> "_models.DocumentStatusDetail": + """Returns the status for a specific document. + + Returns the translation status for a specific document based on the request Id and document Id. 
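get_operations is a paging method rather than a coroutine: it returns an AsyncItemPaged immediately, and the $top/$skip limits documented above map to the top and skip parameters. A sketch of consuming it; the status attribute names are assumptions from the generated models:

```python
# Flat iteration over the submitted batches.
async for batch in client.document_translation.get_operations(top=20, skip=0):
    print(batch.id, batch.status)      # attribute names assumed

# Page-by-page iteration, driven by the service's continuation token.
async for page in client.document_translation.get_operations().by_page():
    async for batch in page:
        ...
```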
+ + :param id: Format - uuid. The batch id. + :type id: str + :param document_id: Format - uuid. The document id. + :type document_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DocumentStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.DocumentStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + 'documentId': self._serialize.url("document_id", document_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('DocumentStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_document_status.metadata = {'url': '/batches/{id}/documents/{documentId}'} # type: ignore + + async def get_operation_status( + self, + id: str, + **kwargs + ) -> "_models.BatchStatusDetail": + """Returns the status for a document translation request. + + Returns the status for a document translation request. + The status includes the overall request status, as well as the status for documents that are + being translated as part of that request. + + :param id: Format - uuid. The operation id. 
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_operation_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_operation_status.metadata = {'url': '/batches/{id}'} # type: ignore + + async def cancel_operation( + self, + id: str, + **kwargs + ) -> "_models.BatchStatusDetail": + """Cancel a currently processing or queued operation. + + Cancel a currently processing or queued operation. + Cancel a currently processing or queued operation. + An operation will not be cancelled if it is already completed or failed or cancelling. A bad + request will be returned. + All documents that have completed translation will not be cancelled and will be charged. + All pending documents will be cancelled if possible. + + :param id: Format - uuid. The operation-id. 
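Both status getters deserialize the Retry-After and ETag response headers and hand them to the cls callback when one is supplied. A sketch of using that hook, with `client` and `batch_id` as in the earlier examples:

```python
def with_headers(pipeline_response, deserialized, response_headers):
    # Exactly the three arguments these operations pass to `cls`.
    return deserialized, response_headers

detail, headers = await client.document_translation.get_operation_status(
    id=batch_id,
    cls=with_headers,
)
print(headers.get("Retry-After"), headers.get("ETag"))
```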
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.cancel_operation.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + cancel_operation.metadata = {'url': '/batches/{id}'} # type: ignore + + def get_operation_documents_status( + self, + id: str, + top: Optional[int] = 50, + skip: Optional[int] = 0, + **kwargs + ) -> AsyncIterable["_models.DocumentStatusResponse"]: + """Returns the status for all documents in a batch document translation request. + + Returns the status for all documents in a batch document translation request. + + The documents included in the response are sorted by document Id in descending order. If the + number of documents in the response exceeds our paging limit, server-side paging is used. + Paginated responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. 
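As the docstring notes, cancellation is best-effort: documents that already completed stay translated (and are charged), while pending documents are cancelled where possible. A short sketch:

```python
detail = await client.document_translation.cancel_operation(id=batch_id)
print(detail.status)                   # attribute name assumed from BatchStatusDetail
```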
+ Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param id: Format - uuid. The operation id. + :type id: str + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DocumentStatusResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.ai.documenttranslation.models.DocumentStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusResponse"] + error_map = { + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operation_documents_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DocumentStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def 
get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + get_operation_documents_status.metadata = {'url': '/batches/{id}/documents'} # type: ignore + + async def get_document_formats( + self, + **kwargs + ) -> "_models.FileFormatListResult": + """Returns a list of supported document formats. + + The list of supported document formats supported by the Document Translation service. + The list includes the common file extension, as well as the content-type if using the upload + API. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_formats.metadata = {'url': '/documents/formats'} # type: ignore + + async def get_glossary_formats( + self, + **kwargs + ) -> "_models.FileFormatListResult": + """Returns the list of supported glossary formats. + + The list of supported glossary formats supported by the Document Translation service. + The list includes the common file extension used. 
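+
+        A minimal usage sketch (illustrative only; ``ops`` is assumed to be an already-constructed
+        instance of this operations class and is not defined in this patch):
+
+        .. code-block:: python
+
+            async def print_glossary_formats(ops):
+                formats = await ops.get_glossary_formats()
+                for file_format in formats.value:
+                    print(file_format.format, file_format.file_extensions)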
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_glossary_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_glossary_formats.metadata = {'url': '/glossaries/formats'} # type: ignore + + async def get_document_storage_source( + self, + **kwargs + ) -> "_models.StorageSourceListResult": + """Returns a list of supported storage sources. + + Returns a list of storage sources/options supported by the Document Translation service. 
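+
+        A minimal usage sketch (illustrative only; ``ops`` is assumed to be an already-constructed
+        instance of this operations class):
+
+        .. code-block:: python
+
+            async def print_storage_sources(ops):
+                sources = await ops.get_document_storage_source()
+                # Expected to contain values such as "AzureBlob".
+                print(sources.value)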
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageSourceListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.StorageSourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSourceListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_storage_source.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('StorageSourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_storage_source.metadata = {'url': '/storagesources'} # type: ignore diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py new file mode 100644 index 000000000000..b3ab9ad64a30 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/__init__.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import BatchRequest + from ._models_py3 import BatchStatusDetail + from ._models_py3 import BatchStatusResponse + from ._models_py3 import BatchSubmissionRequest + from ._models_py3 import DocumentFilter + from ._models_py3 import DocumentStatusDetail + from ._models_py3 import DocumentStatusResponse + from ._models_py3 import ErrorResponseV2 + from ._models_py3 import ErrorV2 + from ._models_py3 import FileFormat + from ._models_py3 import FileFormatListResult + from ._models_py3 import Glossary + from ._models_py3 import InnerErrorV2 + from ._models_py3 import SourceInput + from ._models_py3 import StatusSummary + from ._models_py3 import StorageSourceListResult + from ._models_py3 import TargetInput +except (SyntaxError, ImportError): + from ._models import BatchRequest # type: ignore + from ._models import BatchStatusDetail # type: ignore + from ._models import BatchStatusResponse # type: ignore + from ._models import BatchSubmissionRequest # type: ignore + from ._models import DocumentFilter # type: ignore + from ._models import DocumentStatusDetail # type: ignore + from ._models import DocumentStatusResponse # type: ignore + from ._models import ErrorResponseV2 # type: ignore + from ._models import ErrorV2 # type: ignore + from ._models import FileFormat # type: ignore + from ._models import FileFormatListResult # type: ignore + from ._models import Glossary # type: ignore + from ._models import InnerErrorV2 # type: ignore + from ._models import SourceInput # type: ignore + from ._models import StatusSummary # type: ignore + from ._models import StorageSourceListResult # type: ignore + from ._models import TargetInput # type: ignore + +from ._batch_document_translation_client_enums import ( + ErrorCodeV2, + Status, + StorageInputType, + StorageSource, +) + +__all__ = [ + 'BatchRequest', + 'BatchStatusDetail', + 'BatchStatusResponse', + 'BatchSubmissionRequest', + 'DocumentFilter', + 'DocumentStatusDetail', + 'DocumentStatusResponse', + 'ErrorResponseV2', + 'ErrorV2', + 'FileFormat', + 'FileFormatListResult', + 'Glossary', + 'InnerErrorV2', + 'SourceInput', + 'StatusSummary', + 'StorageSourceListResult', + 'TargetInput', + 'ErrorCodeV2', + 'Status', + 'StorageInputType', + 'StorageSource', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py new file mode 100644 index 000000000000..3226ec60d00d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_batch_document_translation_client_enums.py @@ -0,0 +1,64 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class ErrorCodeV2(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Enums containing high level error codes. + """ + + INVALID_REQUEST = "InvalidRequest" + INVALID_ARGUMENT = "InvalidArgument" + INTERNAL_SERVER_ERROR = "InternalServerError" + SERVICE_UNAVAILABLE = "ServiceUnavailable" + RESOURCE_NOT_FOUND = "ResourceNotFound" + UNAUTHORIZED = "Unauthorized" + REQUEST_RATE_TOO_HIGH = "RequestRateTooHigh" + +class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """List of possible statuses for job or document + """ + + NOT_STARTED = "NotStarted" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELLED = "Cancelled" + CANCELLING = "Cancelling" + VALIDATION_FAILED = "ValidationFailed" + +class StorageInputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Storage type of the input documents source string + """ + + FOLDER = "Folder" + FILE = "File" + +class StorageSource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Storage Source + """ + + AZURE_BLOB = "AzureBlob" diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py new file mode 100644 index 000000000000..9a45dde69780 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models.py @@ -0,0 +1,650 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class BatchRequest(msrest.serialization.Model): + """Definition for the input batch translation request. + + All required parameters must be populated in order to send to Azure. + + :param source: Required. Source of the input documents. + :type source: ~azure.ai.documenttranslation.models.SourceInput + :param targets: Required. Location of the destination for the output. + :type targets: list[~azure.ai.documenttranslation.models.TargetInput] + :param storage_type: Storage type of the input documents source string. Possible values + include: "Folder", "File". 
+ :type storage_type: str or ~azure.ai.documenttranslation.models.StorageInputType + """ + + _validation = { + 'source': {'required': True}, + 'targets': {'required': True}, + } + + _attribute_map = { + 'source': {'key': 'source', 'type': 'SourceInput'}, + 'targets': {'key': 'targets', 'type': '[TargetInput]'}, + 'storage_type': {'key': 'storageType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchRequest, self).__init__(**kwargs) + self.source = kwargs['source'] + self.targets = kwargs['targets'] + self.storage_type = kwargs.get('storage_type', None) + + +class BatchStatusDetail(msrest.serialization.Model): + """Job status response. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. Id of the operation. + :type id: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param summary: Required. + :type summary: ~azure.ai.documenttranslation.models.StatusSummary + """ + + _validation = { + 'id': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'summary': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'summary': {'key': 'summary', 'type': 'StatusSummary'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchStatusDetail, self).__init__(**kwargs) + self.id = kwargs['id'] + self.created_date_time_utc = kwargs['created_date_time_utc'] + self.last_action_date_time_utc = kwargs['last_action_date_time_utc'] + self.status = kwargs['status'] + self.error = kwargs.get('error', None) + self.summary = kwargs['summary'] + + +class BatchStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The summary status of individual operation. + :type value: list[~azure.ai.documenttranslation.models.BatchStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BatchStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BatchStatusResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class BatchSubmissionRequest(msrest.serialization.Model): + """Job submission batch request. 
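+
+    A minimal construction sketch (illustrative only; the URLs below are placeholders for
+    caller-supplied SAS URLs):
+
+    .. code-block:: python
+
+        request = BatchSubmissionRequest(
+            inputs=[
+                BatchRequest(
+                    source=SourceInput(source_url="<sas-url-of-source-container>"),
+                    targets=[
+                        TargetInput(target_url="<sas-url-of-target-container>", language="fr"),
+                    ],
+                )
+            ]
+        )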
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param inputs: Required. The input list of documents or folders containing documents.
+    :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest]
+    """
+
+    _validation = {
+        'inputs': {'required': True},
+    }
+
+    _attribute_map = {
+        'inputs': {'key': 'inputs', 'type': '[BatchRequest]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(BatchSubmissionRequest, self).__init__(**kwargs)
+        self.inputs = kwargs['inputs']
+
+
+class DocumentFilter(msrest.serialization.Model):
+    """DocumentFilter.
+
+    :param prefix: A case-sensitive prefix string to filter documents in the source path for
+     translation.
+     For example, when using an Azure storage blob URI, use the prefix to restrict subfolders for
+     translation.
+    :type prefix: str
+    :param suffix: A case-sensitive suffix string to filter documents in the source path for
+     translation.
+     This is most often used for file extensions.
+    :type suffix: str
+    """
+
+    _attribute_map = {
+        'prefix': {'key': 'prefix', 'type': 'str'},
+        'suffix': {'key': 'suffix', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(DocumentFilter, self).__init__(**kwargs)
+        self.prefix = kwargs.get('prefix', None)
+        self.suffix = kwargs.get('suffix', None)
+
+
+class DocumentStatusDetail(msrest.serialization.Model):
+    """DocumentStatusDetail.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param path: Required. Location of the document or folder.
+    :type path: str
+    :param created_date_time_utc: Required. Operation created date time.
+    :type created_date_time_utc: ~datetime.datetime
+    :param last_action_date_time_utc: Required. Date time in which the operation's status has been
+     updated.
+    :type last_action_date_time_utc: ~datetime.datetime
+    :param status: Required. List of possible statuses for job or document. Possible values
+     include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling",
+     "ValidationFailed".
+    :type status: str or ~azure.ai.documenttranslation.models.Status
+    :param to: Required. To language.
+    :type to: str
+    :param error: This contains an outer error with error code, message, details, target and an
+     inner error with more descriptive details.
+    :type error: ~azure.ai.documenttranslation.models.ErrorV2
+    :param progress: Required. Progress of the translation if available.
+    :type progress: float
+    :param id: Required. Document Id.
+    :type id: str
+    :param character_charged: Characters charged by the API.
+ :type character_charged: long + """ + + _validation = { + 'path': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'to': {'required': True}, + 'progress': {'required': True, 'maximum': 1, 'minimum': 0}, + 'id': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'to': {'key': 'to', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'progress': {'key': 'progress', 'type': 'float'}, + 'id': {'key': 'id', 'type': 'str'}, + 'character_charged': {'key': 'characterCharged', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentStatusDetail, self).__init__(**kwargs) + self.path = kwargs['path'] + self.created_date_time_utc = kwargs['created_date_time_utc'] + self.last_action_date_time_utc = kwargs['last_action_date_time_utc'] + self.status = kwargs['status'] + self.to = kwargs['to'] + self.error = kwargs.get('error', None) + self.progress = kwargs['progress'] + self.id = kwargs['id'] + self.character_charged = kwargs.get('character_charged', None) + + +class DocumentStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The detail status of individual documents. + :type value: list[~azure.ai.documenttranslation.models.DocumentStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DocumentStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DocumentStatusResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class ErrorResponseV2(msrest.serialization.Model): + """Contains unified error information used for HTTP responses across any Cognitive Service. Instances +can be created either through Microsoft.CloudAI.Containers.HttpStatusExceptionV2 or by returning it directly from +a controller. + + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponseV2, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class ErrorV2(msrest.serialization.Model): + """This contains an outer error with error code, message, details, target and an inner error with more descriptive details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Enums containing high level error codes. Possible values include: + "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable", + "ResourceNotFound", "Unauthorized", "RequestRateTooHigh". + :type code: str or ~azure.ai.documenttranslation.models.ErrorCodeV2 + :param message: Required. Gets high level error message. 
+ :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorV2, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs['message'] + self.target = None + self.inner_error = kwargs.get('inner_error', None) + + +class FileFormat(msrest.serialization.Model): + """FileFormat. + + :param format: Name of the format. + :type format: str + :param file_extensions: Supported file extension for this format. + :type file_extensions: list[str] + :param content_types: Supported Content-Types for this format. + :type content_types: list[str] + :param versions: Supported Version. + :type versions: list[str] + """ + + _attribute_map = { + 'format': {'key': 'format', 'type': 'str'}, + 'file_extensions': {'key': 'fileExtensions', 'type': '[str]'}, + 'content_types': {'key': 'contentTypes', 'type': '[str]'}, + 'versions': {'key': 'versions', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileFormat, self).__init__(**kwargs) + self.format = kwargs.get('format', None) + self.file_extensions = kwargs.get('file_extensions', None) + self.content_types = kwargs.get('content_types', None) + self.versions = kwargs.get('versions', None) + + +class FileFormatListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[~azure.ai.documenttranslation.models.FileFormat] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileFormat]'}, + } + + def __init__( + self, + **kwargs + ): + super(FileFormatListResult, self).__init__(**kwargs) + self.value = kwargs['value'] + + +class Glossary(msrest.serialization.Model): + """Glossary / translation memory for the request. + + All required parameters must be populated in order to send to Azure. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. + + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :param format: Format. + :type format: str + :param version: Version. + :type version: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". 
+ :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'glossary_url': {'required': True}, + } + + _attribute_map = { + 'glossary_url': {'key': 'glossaryUrl', 'type': 'str'}, + 'format': {'key': 'format', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Glossary, self).__init__(**kwargs) + self.glossary_url = kwargs['glossary_url'] + self.format = kwargs.get('format', None) + self.version = kwargs.get('version', None) + self.storage_source = kwargs.get('storage_source', None) + + +class InnerErrorV2(msrest.serialization.Model): + """New Inner Error format which conforms to Cognitive Services API Guidelines which is available at https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. +This contains required properties ErrorCode, message and optional properties target, details(key value pair), inner error(this can be nested). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Gets code error string. + :type code: str + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + **kwargs + ): + super(InnerErrorV2, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.target = None + self.inner_error = kwargs.get('inner_error', None) + + +class SourceInput(msrest.serialization.Model): + """Source of the input documents. + + All required parameters must be populated in order to send to Azure. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param filter: + :type filter: ~azure.ai.documenttranslation.models.DocumentFilter + :param language: Language code + If none is specified, we will perform auto detect on the document. + :type language: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". 
+ :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'source_url': {'required': True}, + } + + _attribute_map = { + 'source_url': {'key': 'sourceUrl', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'DocumentFilter'}, + 'language': {'key': 'language', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SourceInput, self).__init__(**kwargs) + self.source_url = kwargs['source_url'] + self.filter = kwargs.get('filter', None) + self.language = kwargs.get('language', None) + self.storage_source = kwargs.get('storage_source', None) + + +class StatusSummary(msrest.serialization.Model): + """StatusSummary. + + All required parameters must be populated in order to send to Azure. + + :param total: Required. Total count. + :type total: int + :param failed: Required. Failed count. + :type failed: int + :param success: Required. Number of Success. + :type success: int + :param in_progress: Required. Number of in progress. + :type in_progress: int + :param not_yet_started: Required. Count of not yet started. + :type not_yet_started: int + :param cancelled: Required. Number of cancelled. + :type cancelled: int + :param total_character_charged: Required. Total characters charged by the API. + :type total_character_charged: long + """ + + _validation = { + 'total': {'required': True}, + 'failed': {'required': True}, + 'success': {'required': True}, + 'in_progress': {'required': True}, + 'not_yet_started': {'required': True}, + 'cancelled': {'required': True}, + 'total_character_charged': {'required': True}, + } + + _attribute_map = { + 'total': {'key': 'total', 'type': 'int'}, + 'failed': {'key': 'failed', 'type': 'int'}, + 'success': {'key': 'success', 'type': 'int'}, + 'in_progress': {'key': 'inProgress', 'type': 'int'}, + 'not_yet_started': {'key': 'notYetStarted', 'type': 'int'}, + 'cancelled': {'key': 'cancelled', 'type': 'int'}, + 'total_character_charged': {'key': 'totalCharacterCharged', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(StatusSummary, self).__init__(**kwargs) + self.total = kwargs['total'] + self.failed = kwargs['failed'] + self.success = kwargs['success'] + self.in_progress = kwargs['in_progress'] + self.not_yet_started = kwargs['not_yet_started'] + self.cancelled = kwargs['cancelled'] + self.total_character_charged = kwargs['total_character_charged'] + + +class StorageSourceListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[str or ~azure.ai.documenttranslation.models.StorageSource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageSourceListResult, self).__init__(**kwargs) + self.value = kwargs['value'] + + +class TargetInput(msrest.serialization.Model): + """Destination for the finished translated documents. + + All required parameters must be populated in order to send to Azure. + + :param target_url: Required. Location of the folder / container with your documents. + :type target_url: str + :param category: Category / custom system for translation request. + :type category: str + :param language: Required. Target Language. + :type language: str + :param glossaries: List of Glossary. 
+ :type glossaries: list[~azure.ai.documenttranslation.models.Glossary] + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'target_url': {'required': True}, + 'language': {'required': True}, + } + + _attribute_map = { + 'target_url': {'key': 'targetUrl', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'glossaries': {'key': 'glossaries', 'type': '[Glossary]'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TargetInput, self).__init__(**kwargs) + self.target_url = kwargs['target_url'] + self.category = kwargs.get('category', None) + self.language = kwargs['language'] + self.glossaries = kwargs.get('glossaries', None) + self.storage_source = kwargs.get('storage_source', None) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py new file mode 100644 index 000000000000..5502c4b68467 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/models/_models_py3.py @@ -0,0 +1,730 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._batch_document_translation_client_enums import * + + +class BatchRequest(msrest.serialization.Model): + """Definition for the input batch translation request. + + All required parameters must be populated in order to send to Azure. + + :param source: Required. Source of the input documents. + :type source: ~azure.ai.documenttranslation.models.SourceInput + :param targets: Required. Location of the destination for the output. + :type targets: list[~azure.ai.documenttranslation.models.TargetInput] + :param storage_type: Storage type of the input documents source string. Possible values + include: "Folder", "File". + :type storage_type: str or ~azure.ai.documenttranslation.models.StorageInputType + """ + + _validation = { + 'source': {'required': True}, + 'targets': {'required': True}, + } + + _attribute_map = { + 'source': {'key': 'source', 'type': 'SourceInput'}, + 'targets': {'key': 'targets', 'type': '[TargetInput]'}, + 'storage_type': {'key': 'storageType', 'type': 'str'}, + } + + def __init__( + self, + *, + source: "SourceInput", + targets: List["TargetInput"], + storage_type: Optional[Union[str, "StorageInputType"]] = None, + **kwargs + ): + super(BatchRequest, self).__init__(**kwargs) + self.source = source + self.targets = targets + self.storage_type = storage_type + + +class BatchStatusDetail(msrest.serialization.Model): + """Job status response. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. Id of the operation. 
+ :type id: str + :param created_date_time_utc: Required. Operation created date time. + :type created_date_time_utc: ~datetime.datetime + :param last_action_date_time_utc: Required. Date time in which the operation's status has been + updated. + :type last_action_date_time_utc: ~datetime.datetime + :param status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :type status: str or ~azure.ai.documenttranslation.models.Status + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + :param summary: Required. + :type summary: ~azure.ai.documenttranslation.models.StatusSummary + """ + + _validation = { + 'id': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'summary': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'summary': {'key': 'summary', 'type': 'StatusSummary'}, + } + + def __init__( + self, + *, + id: str, + created_date_time_utc: datetime.datetime, + last_action_date_time_utc: datetime.datetime, + status: Union[str, "Status"], + summary: "StatusSummary", + error: Optional["ErrorV2"] = None, + **kwargs + ): + super(BatchStatusDetail, self).__init__(**kwargs) + self.id = id + self.created_date_time_utc = created_date_time_utc + self.last_action_date_time_utc = last_action_date_time_utc + self.status = status + self.error = error + self.summary = summary + + +class BatchStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The summary status of individual operation. + :type value: list[~azure.ai.documenttranslation.models.BatchStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[BatchStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["BatchStatusDetail"], + next_link: Optional[str] = None, + **kwargs + ): + super(BatchStatusResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class BatchSubmissionRequest(msrest.serialization.Model): + """Job submission batch request. + + All required parameters must be populated in order to send to Azure. + + :param inputs: Required. The input list of documents or folders containing documents. + :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + """ + + _validation = { + 'inputs': {'required': True}, + } + + _attribute_map = { + 'inputs': {'key': 'inputs', 'type': '[BatchRequest]'}, + } + + def __init__( + self, + *, + inputs: List["BatchRequest"], + **kwargs + ): + super(BatchSubmissionRequest, self).__init__(**kwargs) + self.inputs = inputs + + +class DocumentFilter(msrest.serialization.Model): + """DocumentFilter. 
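+
+    A minimal construction sketch (illustrative values only):
+
+    .. code-block:: python
+
+        # Translate only .docx documents under the "reports/" virtual folder of the source.
+        doc_filter = DocumentFilter(prefix="reports/", suffix=".docx")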
+
+    :param prefix: A case-sensitive prefix string to filter documents in the source path for
+     translation.
+     For example, when using an Azure storage blob URI, use the prefix to restrict subfolders for
+     translation.
+    :type prefix: str
+    :param suffix: A case-sensitive suffix string to filter documents in the source path for
+     translation.
+     This is most often used for file extensions.
+    :type suffix: str
+    """
+
+    _attribute_map = {
+        'prefix': {'key': 'prefix', 'type': 'str'},
+        'suffix': {'key': 'suffix', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        prefix: Optional[str] = None,
+        suffix: Optional[str] = None,
+        **kwargs
+    ):
+        super(DocumentFilter, self).__init__(**kwargs)
+        self.prefix = prefix
+        self.suffix = suffix
+
+
+class DocumentStatusDetail(msrest.serialization.Model):
+    """DocumentStatusDetail.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param path: Required. Location of the document or folder.
+    :type path: str
+    :param created_date_time_utc: Required. Operation created date time.
+    :type created_date_time_utc: ~datetime.datetime
+    :param last_action_date_time_utc: Required. Date time in which the operation's status has been
+     updated.
+    :type last_action_date_time_utc: ~datetime.datetime
+    :param status: Required. List of possible statuses for job or document. Possible values
+     include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling",
+     "ValidationFailed".
+    :type status: str or ~azure.ai.documenttranslation.models.Status
+    :param to: Required. To language.
+    :type to: str
+    :param error: This contains an outer error with error code, message, details, target and an
+     inner error with more descriptive details.
+    :type error: ~azure.ai.documenttranslation.models.ErrorV2
+    :param progress: Required. Progress of the translation if available.
+    :type progress: float
+    :param id: Required. Document Id.
+    :type id: str
+    :param character_charged: Characters charged by the API.
+ :type character_charged: long + """ + + _validation = { + 'path': {'required': True}, + 'created_date_time_utc': {'required': True}, + 'last_action_date_time_utc': {'required': True}, + 'status': {'required': True}, + 'to': {'required': True}, + 'progress': {'required': True, 'maximum': 1, 'minimum': 0}, + 'id': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'created_date_time_utc': {'key': 'createdDateTimeUtc', 'type': 'iso-8601'}, + 'last_action_date_time_utc': {'key': 'lastActionDateTimeUtc', 'type': 'iso-8601'}, + 'status': {'key': 'status', 'type': 'str'}, + 'to': {'key': 'to', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorV2'}, + 'progress': {'key': 'progress', 'type': 'float'}, + 'id': {'key': 'id', 'type': 'str'}, + 'character_charged': {'key': 'characterCharged', 'type': 'long'}, + } + + def __init__( + self, + *, + path: str, + created_date_time_utc: datetime.datetime, + last_action_date_time_utc: datetime.datetime, + status: Union[str, "Status"], + to: str, + progress: float, + id: str, + error: Optional["ErrorV2"] = None, + character_charged: Optional[int] = None, + **kwargs + ): + super(DocumentStatusDetail, self).__init__(**kwargs) + self.path = path + self.created_date_time_utc = created_date_time_utc + self.last_action_date_time_utc = last_action_date_time_utc + self.status = status + self.to = to + self.error = error + self.progress = progress + self.id = id + self.character_charged = character_charged + + +class DocumentStatusResponse(msrest.serialization.Model): + """Document Status Response. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. The detail status of individual documents. + :type value: list[~azure.ai.documenttranslation.models.DocumentStatusDetail] + :param next_link: Url for the next page. Null if no more pages available. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DocumentStatusDetail]'}, + 'next_link': {'key': '@nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["DocumentStatusDetail"], + next_link: Optional[str] = None, + **kwargs + ): + super(DocumentStatusResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ErrorResponseV2(msrest.serialization.Model): + """Contains unified error information used for HTTP responses across any Cognitive Service. Instances +can be created either through Microsoft.CloudAI.Containers.HttpStatusExceptionV2 or by returning it directly from +a controller. + + :param error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :type error: ~azure.ai.documenttranslation.models.ErrorV2 + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorV2'}, + } + + def __init__( + self, + *, + error: Optional["ErrorV2"] = None, + **kwargs + ): + super(ErrorResponseV2, self).__init__(**kwargs) + self.error = error + + +class ErrorV2(msrest.serialization.Model): + """This contains an outer error with error code, message, details, target and an inner error with more descriptive details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Enums containing high level error codes. 
Possible values include: + "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable", + "ResourceNotFound", "Unauthorized", "RequestRateTooHigh". + :type code: str or ~azure.ai.documenttranslation.models.ErrorCodeV2 + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + *, + message: str, + code: Optional[Union[str, "ErrorCodeV2"]] = None, + inner_error: Optional["InnerErrorV2"] = None, + **kwargs + ): + super(ErrorV2, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = None + self.inner_error = inner_error + + +class FileFormat(msrest.serialization.Model): + """FileFormat. + + :param format: Name of the format. + :type format: str + :param file_extensions: Supported file extension for this format. + :type file_extensions: list[str] + :param content_types: Supported Content-Types for this format. + :type content_types: list[str] + :param versions: Supported Version. + :type versions: list[str] + """ + + _attribute_map = { + 'format': {'key': 'format', 'type': 'str'}, + 'file_extensions': {'key': 'fileExtensions', 'type': '[str]'}, + 'content_types': {'key': 'contentTypes', 'type': '[str]'}, + 'versions': {'key': 'versions', 'type': '[str]'}, + } + + def __init__( + self, + *, + format: Optional[str] = None, + file_extensions: Optional[List[str]] = None, + content_types: Optional[List[str]] = None, + versions: Optional[List[str]] = None, + **kwargs + ): + super(FileFormat, self).__init__(**kwargs) + self.format = format + self.file_extensions = file_extensions + self.content_types = content_types + self.versions = versions + + +class FileFormatListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[~azure.ai.documenttranslation.models.FileFormat] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[FileFormat]'}, + } + + def __init__( + self, + *, + value: List["FileFormat"], + **kwargs + ): + super(FileFormatListResult, self).__init__(**kwargs) + self.value = value + + +class Glossary(msrest.serialization.Model): + """Glossary / translation memory for the request. + + All required parameters must be populated in order to send to Azure. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. 
+ + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :param format: Format. + :type format: str + :param version: Version. + :type version: str + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'glossary_url': {'required': True}, + } + + _attribute_map = { + 'glossary_url': {'key': 'glossaryUrl', 'type': 'str'}, + 'format': {'key': 'format', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + glossary_url: str, + format: Optional[str] = None, + version: Optional[str] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(Glossary, self).__init__(**kwargs) + self.glossary_url = glossary_url + self.format = format + self.version = version + self.storage_source = storage_source + + +class InnerErrorV2(msrest.serialization.Model): + """New Inner Error format which conforms to Cognitive Services API Guidelines which is available at https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. +This contains required properties ErrorCode, message and optional properties target, details(key value pair), inner error(this can be nested). + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Gets code error string. + :type code: str + :param message: Required. Gets high level error message. + :type message: str + :ivar target: Gets the source of the error. + For example it would be "documents" or "document id" in case of invalid document. + :vartype target: str + :param inner_error: New Inner Error format which conforms to Cognitive Services API Guidelines + which is available at + https://microsoft.sharepoint.com/%3Aw%3A/t/CognitiveServicesPMO/EUoytcrjuJdKpeOKIK_QRC8BPtUYQpKBi8JsWyeDMRsWlQ?e=CPq8ow. + This contains required properties ErrorCode, message and optional properties target, + details(key value pair), inner error(this can be nested). + :type inner_error: ~azure.ai.documenttranslation.models.InnerErrorV2 + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + 'target': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'inner_error': {'key': 'innerError', 'type': 'InnerErrorV2'}, + } + + def __init__( + self, + *, + code: str, + message: str, + inner_error: Optional["InnerErrorV2"] = None, + **kwargs + ): + super(InnerErrorV2, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = None + self.inner_error = inner_error + + +class SourceInput(msrest.serialization.Model): + """Source of the input documents. + + All required parameters must be populated in order to send to Azure. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param filter: + :type filter: ~azure.ai.documenttranslation.models.DocumentFilter + :param language: Language code + If none is specified, we will perform auto detect on the document. + :type language: str + :param storage_source: Storage Source. 
Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'source_url': {'required': True}, + } + + _attribute_map = { + 'source_url': {'key': 'sourceUrl', 'type': 'str'}, + 'filter': {'key': 'filter', 'type': 'DocumentFilter'}, + 'language': {'key': 'language', 'type': 'str'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + source_url: str, + filter: Optional["DocumentFilter"] = None, + language: Optional[str] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(SourceInput, self).__init__(**kwargs) + self.source_url = source_url + self.filter = filter + self.language = language + self.storage_source = storage_source + + +class StatusSummary(msrest.serialization.Model): + """StatusSummary. + + All required parameters must be populated in order to send to Azure. + + :param total: Required. Total count. + :type total: int + :param failed: Required. Failed count. + :type failed: int + :param success: Required. Number of Success. + :type success: int + :param in_progress: Required. Number of in progress. + :type in_progress: int + :param not_yet_started: Required. Count of not yet started. + :type not_yet_started: int + :param cancelled: Required. Number of cancelled. + :type cancelled: int + :param total_character_charged: Required. Total characters charged by the API. + :type total_character_charged: long + """ + + _validation = { + 'total': {'required': True}, + 'failed': {'required': True}, + 'success': {'required': True}, + 'in_progress': {'required': True}, + 'not_yet_started': {'required': True}, + 'cancelled': {'required': True}, + 'total_character_charged': {'required': True}, + } + + _attribute_map = { + 'total': {'key': 'total', 'type': 'int'}, + 'failed': {'key': 'failed', 'type': 'int'}, + 'success': {'key': 'success', 'type': 'int'}, + 'in_progress': {'key': 'inProgress', 'type': 'int'}, + 'not_yet_started': {'key': 'notYetStarted', 'type': 'int'}, + 'cancelled': {'key': 'cancelled', 'type': 'int'}, + 'total_character_charged': {'key': 'totalCharacterCharged', 'type': 'long'}, + } + + def __init__( + self, + *, + total: int, + failed: int, + success: int, + in_progress: int, + not_yet_started: int, + cancelled: int, + total_character_charged: int, + **kwargs + ): + super(StatusSummary, self).__init__(**kwargs) + self.total = total + self.failed = failed + self.success = success + self.in_progress = in_progress + self.not_yet_started = not_yet_started + self.cancelled = cancelled + self.total_character_charged = total_character_charged + + +class StorageSourceListResult(msrest.serialization.Model): + """Base type for List return in our api. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. list of objects. + :type value: list[str or ~azure.ai.documenttranslation.models.StorageSource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[str]'}, + } + + def __init__( + self, + *, + value: List[Union[str, "StorageSource"]], + **kwargs + ): + super(StorageSourceListResult, self).__init__(**kwargs) + self.value = value + + +class TargetInput(msrest.serialization.Model): + """Destination for the finished translated documents. + + All required parameters must be populated in order to send to Azure. + + :param target_url: Required. Location of the folder / container with your documents. 
+ :type target_url: str + :param category: Category / custom system for translation request. + :type category: str + :param language: Required. Target Language. + :type language: str + :param glossaries: List of Glossary. + :type glossaries: list[~azure.ai.documenttranslation.models.Glossary] + :param storage_source: Storage Source. Possible values include: "AzureBlob". + :type storage_source: str or ~azure.ai.documenttranslation.models.StorageSource + """ + + _validation = { + 'target_url': {'required': True}, + 'language': {'required': True}, + } + + _attribute_map = { + 'target_url': {'key': 'targetUrl', 'type': 'str'}, + 'category': {'key': 'category', 'type': 'str'}, + 'language': {'key': 'language', 'type': 'str'}, + 'glossaries': {'key': 'glossaries', 'type': '[Glossary]'}, + 'storage_source': {'key': 'storageSource', 'type': 'str'}, + } + + def __init__( + self, + *, + target_url: str, + language: str, + category: Optional[str] = None, + glossaries: Optional[List["Glossary"]] = None, + storage_source: Optional[Union[str, "StorageSource"]] = None, + **kwargs + ): + super(TargetInput, self).__init__(**kwargs) + self.target_url = target_url + self.category = category + self.language = language + self.glossaries = glossaries + self.storage_source = storage_source diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py new file mode 100644 index 000000000000..e524e2215fb7 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/__init__.py @@ -0,0 +1,13 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._document_translation_operations import DocumentTranslationOperations + +__all__ = [ + 'DocumentTranslationOperations', +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py new file mode 100644 index 000000000000..3535e562022e --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/operations/_document_translation_operations.py @@ -0,0 +1,765 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.polling.base_polling import LROBasePolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DocumentTranslationOperations(object): + """DocumentTranslationOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.ai.documenttranslation.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _submit_batch_request_initial( + self, + inputs, # type: List["_models.BatchRequest"] + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + + _body = _models.BatchSubmissionRequest(inputs=inputs) + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._submit_batch_request_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if _body is not None: + body_content = self._serialize.body(_body, 'BatchSubmissionRequest') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Operation-Location']=self._deserialize('str', response.headers.get('Operation-Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _submit_batch_request_initial.metadata = {'url': '/batches'} # type: ignore + + def begin_submit_batch_request( + self, + inputs, # type: List["_models.BatchRequest"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Submit a document translation request to the Document Translation service. + + Use this API to submit a bulk (batch) translation request to the Document Translation service. + Each request can contain multiple documents and must contain a source and destination container + for each document. + + The prefix and suffix filter (if supplied) are used to filter folders. The prefix is applied to + the subpath after the container name. + + Glossaries / Translation memory can be included in the request and are applied by the service + when the document is translated. + + If the glossary is invalid or unreachable during translation, an error is indicated in the + document status. + If a file with the same name already exists at the destination, it will be overwritten. The + targetUrl for each target language must be unique. + + :param inputs: The input list of documents or folders containing documents. 
+ :type inputs: list[~azure.ai.documenttranslation.models.BatchRequest] + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: Pass in True if you'd like the LROBasePolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', False) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._submit_batch_request_initial( + inputs=inputs, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + + if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_submit_batch_request.metadata = {'url': '/batches'} # type: ignore + + def get_operations( + self, + top=50, # type: Optional[int] + skip=0, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.BatchStatusResponse"] + """Returns a list of batch requests submitted and the status for each request. + + Returns a list of batch requests submitted and the status for each request. + This list only contains batch requests submitted by the user (based on the subscription). The + status for each request is sorted by id. + + If the number of requests exceeds our paging limit, server-side paging is used. Paginated + responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. + Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. 
+ This reduces the risk of the client making assumptions about the data returned. + + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either BatchStatusResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.documenttranslation.models.BatchStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusResponse"] + error_map = { + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operations.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('BatchStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + get_operations.metadata = {'url': 
'/batches'} # type: ignore + + def get_document_status( + self, + id, # type: str + document_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.DocumentStatusDetail" + """Returns the status for a specific document. + + Returns the translation status for a specific document based on the request Id and document Id. + + :param id: Format - uuid. The batch id. + :type id: str + :param document_id: Format - uuid. The document id. + :type document_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DocumentStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.DocumentStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + 'documentId': self._serialize.url("document_id", document_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + deserialized = self._deserialize('DocumentStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_document_status.metadata = {'url': '/batches/{id}/documents/{documentId}'} # type: ignore + + def get_operation_status( + self, + id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.BatchStatusDetail" + """Returns the status for a document translation request. + + Returns the status for a document translation request. + The status includes the overall request status, as well as the status for documents that are + being translated as part of that request. + + :param id: Format - uuid. The operation id. 
+        :type id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchStatusDetail, or the result of cls(response)
+        :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchStatusDetail"]
+        error_map = {
+            409: ResourceExistsError,
+            401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)),
+            404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)),
+            429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)),
+            500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)),
+            503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)),
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_operation_status.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
+            'id': self._serialize.url("id", id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        response_headers = {}
+        response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+        response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
+        deserialized = self._deserialize('BatchStatusDetail', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)
+
+        return deserialized
+    get_operation_status.metadata = {'url': '/batches/{id}'}  # type: ignore
+
+    def cancel_operation(
+        self,
+        id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchStatusDetail"
+        """Cancel a currently processing or queued operation.
+
+        Cancel a currently processing or queued operation.
+        An operation will not be cancelled if it is already completed, failed, or cancelling; in that
+        case, a bad request is returned.
+        Documents that have already completed translation will not be cancelled and will be charged.
+        All pending documents will be cancelled if possible.
+
+        :param id: Format - uuid. The operation id.
+ :type id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: BatchStatusDetail, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.BatchStatusDetail + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchStatusDetail"] + error_map = { + 409: ResourceExistsError, + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.cancel_operation.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('BatchStatusDetail', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + cancel_operation.metadata = {'url': '/batches/{id}'} # type: ignore + + def get_operation_documents_status( + self, + id, # type: str + top=50, # type: Optional[int] + skip=0, # type: Optional[int] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.DocumentStatusResponse"] + """Returns the status for all documents in a batch document translation request. + + Returns the status for all documents in a batch document translation request. + + The documents included in the response are sorted by document Id in descending order. If the + number of documents in the response exceeds our paging limit, server-side paging is used. + Paginated responses indicate a partial result and include a continuation token in the response. + The absence of a continuation token means that no additional pages are available. + + $top and $skip query parameters can be used to specify a number of results to return and an + offset for the collection. + The server honors the values specified by the client. However, clients must be prepared to + handle responses that contain a different page size or contain a continuation token. + + When both $top and $skip are included, the server should first apply $skip and then $top on the + collection. 
+ Note: If the server can't honor $top and/or $skip, the server must return an error to the + client informing about it instead of just ignoring the query options. + This reduces the risk of the client making assumptions about the data returned. + + :param id: Format - uuid. The operation id. + :type id: str + :param top: Take the $top entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type top: int + :param skip: Skip the $skip entries in the collection + When both $top and $skip are supplied, $skip is applied first. + :type skip: int + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DocumentStatusResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.ai.documenttranslation.models.DocumentStatusResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.DocumentStatusResponse"] + error_map = { + 409: ResourceExistsError, + 400: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 401: lambda response: ClientAuthenticationError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 404: lambda response: ResourceNotFoundError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.get_operation_documents_status.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int', maximum=100, minimum=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', maximum=2147483647, minimum=0) + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + 'id': self._serialize.url("id", id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DocumentStatusResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = 
prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + get_operation_documents_status.metadata = {'url': '/batches/{id}/documents'} # type: ignore + + def get_document_formats( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.FileFormatListResult" + """Returns a list of supported document formats. + + The list of supported document formats supported by the Document Translation service. + The list includes the common file extension, as well as the content-type if using the upload + API. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_formats.metadata = {'url': '/documents/formats'} # type: ignore + + def get_glossary_formats( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.FileFormatListResult" + """Returns the list of supported glossary formats. + + The list of supported glossary formats supported by the Document Translation service. + The list includes the common file extension used. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: FileFormatListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.FileFormatListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.FileFormatListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_glossary_formats.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('FileFormatListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_glossary_formats.metadata = {'url': '/glossaries/formats'} # type: ignore + + def get_document_storage_source( + self, + **kwargs # type: Any + ): + # type: (...) -> "_models.StorageSourceListResult" + """Returns a list of supported storage sources. + + Returns a list of storage sources/options supported by the Document Translation service. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageSourceListResult, or the result of cls(response) + :rtype: ~azure.ai.documenttranslation.models.StorageSourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageSourceListResult"] + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 429: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 500: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + 503: lambda response: HttpResponseError(response=response, model=self._deserialize(_models.ErrorResponseV2, response)), + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.get_document_storage_source.metadata['url'] # type: ignore + path_format_arguments = { + 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('StorageSourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_document_storage_source.metadata = {'url': '/storagesources'} # type: ignore diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py new file mode 100644 index 000000000000..059be9359f9d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline.policies import AzureKeyCredentialPolicy + +COGNITIVE_KEY_HEADER = "Ocp-Apim-Subscription-Key" + + +def get_authentication_policy(credential): + authentication_policy = None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if isinstance(credential, AzureKeyCredential): + authentication_policy = AzureKeyCredentialPolicy( + name=COGNITIVE_KEY_HEADER, credential=credential + ) + elif credential is not None and not hasattr(credential, "get_token"): + raise TypeError("Unsupported credential: {}. Use an instance of AzureKeyCredential " + "or a token credential from azure.identity".format(type(credential))) + + return authentication_policy \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py new file mode 100644 index 000000000000..492afc1ab7d4 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -0,0 +1,273 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from typing import Any, List + + +class StorageSourceInput(object): + """Source of the input documents. + + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param str language: Language code + If none is specified, we will perform auto detect on the document. + :keyword str prefix: A case-sensitive prefix string to filter documents in the source path for + translation. For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for + translation. + :keyword str suffix: A case-sensitive suffix string to filter documents in the source path for + translation. This is most often use for file extensions. + :ivar storage_source: Storage Source. Default value: "AzureBlob". + :vartype storage_source: str + """ + + def __init__( + self, + source_url, + language, + **kwargs + ): + # type: (str, str, **Any) -> None + self.storage_source = "AzureBlob" + self.source_url = source_url + self.language = language + self.prefix = kwargs.get("prefix", None) + self.suffix = kwargs.get("suffix", None) + + +class TranslationGlossary(object): + """Glossary / translation memory for the request. + + :param glossary_url: Required. Location of the glossary. + We will use the file extension to extract the formatting if the format parameter is not + supplied. + If the translation language pair is not present in the glossary, it will not be applied. + :type glossary_url: str + :keyword str format: Format. + :keyword str version: Version. + :ivar storage_source: Storage Source. Default value: "AzureBlob". + :vartype storage_source: str + """ + + def __init__( + self, + glossary_url, + **kwargs + ): + # type: (str, **Any) -> None + self.storage_source = "AzureBlob" + self.glossary_url = glossary_url + self.format = kwargs.get("format", None) + self.version = kwargs.get("version", None) + + +class StorageTargetInput(object): + """Destination for the finished translated documents. + + :param target_url: Required. Location of the folder / container with your documents. + :type target_url: str + :param language: Required. Target Language. 
+ :type language: str + :keyword str custom_model_id: Category / custom system for translation request. + :keyword glossaries: List of TranslationGlossary. + :paramtype glossaries: Union[list[str], list[~azure.ai.documenttranslation.TranslationGlossary]] + :ivar storage_source: Storage Source. Default value: "AzureBlob". + :vartype storage_source: str + """ + + def __init__( + self, + target_url, + language, + **kwargs + ): + # type: (str, str, **Any) -> None + self.storage_source = "AzureBlob" + self.target_url = target_url + self.language = language + self.custom_model_id = kwargs.get("custom_model_id", None) + self.glossaries = kwargs.get("glossaries", None) + + +class BatchDocumentInput(object): + """Definition for the input batch translation request. + + :param source: Required. Source of the input documents. + :type source: ~azure.ai.documenttranslation.StorageSourceInput + :param targets: Required. Location of the destination for the output. + :type targets: list[StorageTargetInput] + :keyword storage_type: Storage type of the input documents source string. Possible values + include: "Folder", "File". + :paramtype storage_type: str or ~azure.ai.documenttranslation.StorageInputType + """ + + def __init__( + self, + source, + targets, + **kwargs + ): + # type: (StorageSourceInput, List[StorageTargetInput], **Any) -> None + self.source = source + self.targets = targets + self.storage_type = kwargs.get("storage_type", None) + + +class BatchStatusDetail(object): + """Job status response. + + :ivar id: Required. Id of the operation. + :vartype id: str + :ivar created_on: Required. Operation created date time. + :vartype created_on: ~datetime.datetime + :ivar last_updated_on: Required. Date time in which the operation's status has been + updated. + :vartype last_updated_on: ~datetime.datetime + :ivar status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :vartype status: str + :ivar summary: Required. + :vartype summary: ~azure.ai.documenttranslation.StatusSummary + :ivar error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. + :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError + """ + + def __init__( + self, + **kwargs + ): + # type: (**Any) -> None + self.id = kwargs['id'] + self.created_on = kwargs['created_on'] + self.last_updated_on = kwargs['last_updated_on'] + self.status = kwargs.get('status', None) + self.summary = kwargs['summary'] + self.error = kwargs.get("error", None) + + +class DocumentStatusDetail(object): + """DocumentStatusDetail. + + :ivar document_url: Required. Location of the document or folder. + :vartype document_url: str + :ivar created_on: Required. Operation created date time. + :vartype created_on: ~datetime.datetime + :ivar last_updated_on: Required. Date time in which the operation's status has been + updated. + :vartype last_updated_on: ~datetime.datetime + :ivar status: Required. List of possible statuses for job or document. Possible values + include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", + "ValidationFailed". + :vartype status: str + :ivar translate_to: Required. To language. + :vartype translate_to: str + :ivar error: This contains an outer error with error code, message, details, target and an + inner error with more descriptive details. 
+    :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError
+    :ivar progress: Progress of the translation if available.
+    :vartype progress: float
+    :ivar id: Document Id.
+    :vartype id: str
+    :ivar int characters_charged: Characters charged by the API.
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.document_url = kwargs['document_url']
+        self.created_on = kwargs['created_on']
+        self.last_updated_on = kwargs['last_updated_on']
+        self.status = kwargs['status']
+        self.translate_to = kwargs['translate_to']
+        self.error = kwargs.get('error', None)
+        self.progress = kwargs.get('progress', None)
+        self.id = kwargs.get('id', None)
+        self.characters_charged = kwargs.get('characters_charged', None)
+
+
+class DocumentTranslationError(object):
+    """This contains an outer error with error code, message, details, target and an
+    inner error with more descriptive details.
+
+    :ivar code: Enums containing high level error codes. Possible values include:
+        "InvalidRequest", "InvalidArgument", "InternalServerError", "ServiceUnavailable",
+        "ResourceNotFound", "Unauthorized", "RequestRateTooHigh".
+    :vartype code: str
+    :ivar message: Gets high level error message.
+    :vartype message: str
+    :ivar target: Gets the source of the error.
+        For example it would be "documents" or "document id" in case of invalid document.
+    :vartype target: str
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.code = kwargs.get('code', None)
+        self.message = kwargs.get('message', None)
+        self.target = kwargs.get('target', None)
+
+
+class StatusSummary(object):
+    """StatusSummary.
+
+    :ivar total: Total count.
+    :vartype total: int
+    :ivar failed: Failed count.
+    :vartype failed: int
+    :ivar succeeded: Number of Success.
+    :vartype succeeded: int
+    :ivar in_progress: Number of in progress.
+    :vartype in_progress: int
+    :ivar not_yet_started: Count of not yet started.
+    :vartype not_yet_started: int
+    :ivar cancelled: Number of cancelled.
+    :vartype cancelled: int
+    :ivar int total_characters_charged: Required. Total characters charged by the API.
+    """
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        # type: (**Any) -> None
+        self.total = kwargs.get('total', None)
+        self.failed = kwargs.get('failed', None)
+        self.succeeded = kwargs.get('succeeded', None)
+        self.in_progress = kwargs.get('in_progress', None)
+        self.not_yet_started = kwargs.get('not_yet_started', None)
+        self.cancelled = kwargs.get('cancelled', None)
+        self.total_characters_charged = kwargs.get('total_characters_charged', None)
+
+
+class FileFormat(object):
+    """FileFormat.
+
+    :ivar format: Name of the format.
+    :vartype format: str
+    :ivar file_extensions: Supported file extension for this format.
+    :vartype file_extensions: list[str]
+    :ivar content_types: Supported Content-Types for this format.
+    :vartype content_types: list[str]
+    :ivar versions: Supported Version.
+ :vartype versions: list[str] + """ + + def __init__( + self, + **kwargs + ): + # type: (**Any) -> None + self.format = kwargs.get('format', None) + self.file_extensions = kwargs.get('file_extensions', None) + self.content_types = kwargs.get('content_types', None) + self.versions = kwargs.get('versions', None) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py new file mode 100644 index 000000000000..9f13f549de6a --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_user_agent.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from ._version import VERSION + +USER_AGENT = "ai-translatortext/{}".format(VERSION) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py new file mode 100644 index 000000000000..3a9726d91eee --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_version.py @@ -0,0 +1,7 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +VERSION = "1.0.0b1" diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py new file mode 100644 index 000000000000..dd05613d4fe4 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/__init__.py @@ -0,0 +1,11 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from ._client_async import DocumentTranslationClient + +__all__ = [ + "DocumentTranslationClient" +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py new file mode 100644 index 000000000000..19f58c9c8b6b --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py @@ -0,0 +1,162 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +from typing import Union, Any, List, TYPE_CHECKING +from azure.core.polling import AsyncLROPoller +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.tracing.decorator import distributed_trace +from azure.core.async_paging import AsyncItemPaged +from .._generated.aio import BatchDocumentTranslationClient as _BatchDocumentTranslationClient +from .._user_agent import USER_AGENT +from .._models import BatchStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat +from .._helpers import get_authentication_policy +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + from azure.core.credentials import AzureKeyCredential + + +class DocumentTranslationClient(object): + """DocumentTranslationClient + + """ + + def __init__( + self, endpoint: str, credential: Union["AzureKeyCredential", "AsyncTokenCredential"], **kwargs: Any + ) -> None: + """ + + :param str endpoint: + :param credential: + :type credential: Union[AzureKeyCredential, AsyncTokenCredential] + :keyword str api_version: + :rtype: None + """ + self._endpoint = endpoint + self._credential = credential + self._api_version = kwargs.pop('api_version', None) + + authentication_policy = get_authentication_policy(credential) + self._client = _BatchDocumentTranslationClient( + endpoint=endpoint, + credential=credential, # type: ignore + api_version=self._api_version, + sdk_moniker=USER_AGENT, + authentication_policy=authentication_policy, + polling_interval=5, # TODO what is appropriate polling interval + **kwargs + ) + + @distributed_trace_async + async def begin_batch_translation(self, inputs, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> AsyncLROPoller[BatchStatusDetail] + """ + + :param inputs: + :type inputs: List[~azure.ai.documenttranslation.BatchDocumentInput] + :rtype: ~azure.core.polling.AsyncLROPoller[BatchStatusDetail] + """ + return await self._client.document_translation.begin_submit_batch_request( + inputs=inputs, + polling=True, + **kwargs + ) + + @distributed_trace_async + async def get_batch_status(self, batch_id, **kwargs): + # type: (Union[AsyncLROPoller, str], **Any) -> BatchStatusDetail + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :rtype: ~azure.ai.documenttranslation.BatchStatusDetail + """ + if isinstance(batch_id, AsyncLROPoller): + batch_id = batch_id.batch_id + + return await self._client.document_translation.get_operation_status(batch_id, **kwargs) + + @distributed_trace_async + async def cancel_batch(self, batch_id, **kwargs): + # type: (Union[AsyncLROPoller, str], **Any) -> None + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :rtype: None + """ + if isinstance(batch_id, AsyncLROPoller): + batch_id = batch_id.batch_id + + await self._client.document_translation.cancel_operation(batch_id, **kwargs) + + @distributed_trace + def list_batches(self, **kwargs): + # type: (**Any) -> AsyncItemPaged[BatchStatusDetail] + """ + + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.async_paging.AsyncItemPaged[BatchStatusDetail] + """ + return self._client.document_translation.get_operations(**kwargs) + + @distributed_trace + def list_documents_statuses(self, batch_id, **kwargs): + # type: (Union[AsyncLROPoller, str], **Any) -> AsyncItemPaged[DocumentStatusDetail] + """ + + :param batch_id: guid id for batch or 
poller object + :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :keyword int results_per_page: + :keyword int skip: + :rtype: ~azure.core.async_paging.AsyncItemPaged[DocumentStatusDetail] + """ + if isinstance(batch_id, AsyncLROPoller): + batch_id = batch_id.batch_id + + return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) + + @distributed_trace_async + async def get_document_status(self, batch_id, document_id, **kwargs): + # type: (Union[AsyncLROPoller, str], str, **Any) -> DocumentStatusDetail + """ + + :param batch_id: guid id for batch or poller object + :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :param document_id: guid id for document + :type document_id: str + :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail + """ + return await self._client.document_translation.get_document_status(batch_id, document_id, **kwargs) + + @distributed_trace_async + async def get_supported_storage_sources(self, **kwargs): + # type: (**Any) -> List[str] + """ + + :rtype: list[str] + """ + return await self._client.document_translation.get_document_storage_source(**kwargs) + + @distributed_trace_async + async def get_supported_glossary_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: list[FileFormat] + """ + + return await self._client.document_translation.get_glossary_formats(**kwargs) + + @distributed_trace_async + async def get_supported_document_formats(self, **kwargs): + # type: (**Any) -> List[FileFormat] + """ + + :rtype: list[FileFormat] + """ + + return await self._client.document_translation.get_document_formats(**kwargs) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/py.typed b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt b/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt new file mode 100644 index 000000000000..2e614298d253 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/dev_requirements.txt @@ -0,0 +1,5 @@ +-e ../../../tools/azure-sdk-tools +-e ../../../tools/azure-devtools +../../core/azure-core +-e ../../identity/azure-identity +aiohttp>=3.0; python_version >= '3.5' diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py new file mode 100644 index 000000000000..0372f9bd309f --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -0,0 +1,74 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_batch_translation(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageSourceInput, + StorageTargetInput + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source=StorageSourceInput( + source_url=source_container_url, + language="en", + prefix="document_2021" + ), + targets=[ + StorageTargetInput( + target_url=target_container_url_es, + language="es" + ), + StorageTargetInput( + target_url=target_container_url_fr, + language="fr" + ) + ], + storage_type="file" + ) + ] + + poller = client.begin_batch_translation(batch) + + batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + + print("Batch initial status: {}".format(batch_detail.status)) + print("Number of translations on documents: {}".format(batch_detail.summary.total)) + + batch_result = poller.result() # type: BatchStatusDetail + if batch_result.status == "Succeeded": + print("We translated our documents!") + if batch_result.summary.failed > 0: + check_documents(client, batch_detail.id) + + if batch_result.status == "Failed": + print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + check_documents(client, batch_detail.id) + + +def check_documents(client, batch_id): + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.document_url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py new file mode 100644 index 000000000000..fdb56a2856f7 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py @@ -0,0 +1,53 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_cancel_batch(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageSourceInput, + StorageTargetInput + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source=StorageSourceInput( + source_url=source_container_url, + language="en", + prefix="document_2021" + ), + targets=[ + StorageTargetInput( + target_url=target_container_url_es, + language="es" + ) + ], + storage_type="file" + ) + ] + + poller = client.begin_batch_translation(batch) + + batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + + print("Batch status: {}".format(batch_detail.status)) + print("Number of translations on documents: {}".format(batch_detail.summary.total)) + + client.cancel_batch(poller) + detail = client.get_batch_status(poller) # type: BatchStatusDetail + + if detail.status in ["Cancelled", "Cancelling"]: + print("We cancelled batch with ID: {}".format(detail.id)) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py new file mode 100644 index 000000000000..9c0093f0efaf --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_batch_translation(): + import os + import time + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageSourceInput, + StorageTargetInput + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source=StorageSourceInput( + source_url=source_container_url, + language="en", + prefix="document_2021" + ), + targets=[ + StorageTargetInput( + target_url=target_container_url_es, + language="es" + ), + StorageTargetInput( + target_url=target_container_url_fr, + language="fr" + ) + ], + storage_type="folder" + ) + ] + + poller = client.begin_batch_translation(batch) + + while True: + batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + if batch_detail.status == ["NotStarted", "Running"]: + time.sleep(5) + continue + + if batch_detail.status == ["Failed", "ValidationFailed"]: + print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) + check_documents(client, batch_detail.id) + exit(1) + + if batch_detail.status == "Succeeded": + print("We translated our documents!") + if batch_detail.summary.failed > 0: + check_documents(client, batch_detail.id) + break + + +def check_documents(client, batch_id): + docs_to_retry = [] + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.document_url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.id not in docs_to_retry: + docs_to_retry.append(document.id) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py new file mode 100644 index 000000000000..c557033b532f --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_custom_translation(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageSourceInput, + StorageTargetInput + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + custom_model_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + batch = [ + BatchDocumentInput( + source=StorageSourceInput( + source_url=source_container_url, + language="en", + prefix="document_2021" + ), + targets=[ + StorageTargetInput( + target_url=target_container_url_fr, + language="fr", + custom_model_id=custom_model_id + ) + ] + ) + ] + + poller = client.begin_batch_translation(batch) + batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + + print("Batch initial status: {}".format(batch_detail.status)) + print("Number of translations on documents: {}".format(batch_detail.summary.total)) + + batch_result = poller.result() # type: BatchStatusDetail + if batch_result.status == "Succeeded": + print("We translated our documents!") + if batch_result.summary.failed > 0: + check_documents(client, batch_detail.id) + + if batch_result.status == "Failed": + print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + check_documents(client, batch_detail.id) + + +def check_documents(client, batch_id): + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.document_url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py new file mode 100644 index 000000000000..c57fcfafbb99 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + + +def sample_list_all_batches(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + ) + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + batches = client.list_batches() + + print("Batches summary") + for batch in batches: + print("Batch ID: {}".format(batch.id)) + print("Batch status: {}".format(batch.status)) + print("Batch created on: {}".format(batch.created_on)) + print("Batch last updated on: {}".format(batch.last_updated)) + print("Batch number of translations on documents: {}".format(batch.summary.total)) + + print("Of total documents...") + print("{} failed".format(batch.summary.failed)) + print("{} succeeded".format(batch.summary.succeeded)) + print("{} in progress".format(batch.summary.in_progress)) + print("{} not yet started".format(batch.summary.not_yet_started)) + print("{} cancelled".format(batch.summary.cancelled)) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml b/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml new file mode 100644 index 000000000000..901bc8ccbfa6 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/sdk_packaging.toml @@ -0,0 +1,2 @@ +[packaging] +auto_update = false diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg b/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg new file mode 100644 index 000000000000..3c6e79cf31da --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/setup.py b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py new file mode 100644 index 000000000000..2b8dc240462f --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py @@ -0,0 +1,71 @@ +from setuptools import setup, find_packages +import os +from io import open +import re + +PACKAGE_NAME = "azure-ai-documenttranslation" +PACKAGE_PPRINT_NAME = "Document Translation Package" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace('-', '/') +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace('-', '.') + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) +if not version: + raise RuntimeError('Cannot find version information') + +with open('README.md', encoding='utf-8') as f: + long_description = f.read() + +setup( + name=PACKAGE_NAME, + version=version, + description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), + + # ensure that these are updated to reflect the package owners' information + long_description=long_description, + url='https://github.com/Azure/azure-sdk-for-python', + author='Microsoft Corporation', + author_email='azuresdkengsysadmins@microsoft.com', + + license='MIT License', + # ensure that the development status reflects the status of your package + classifiers=[ + "Development Status :: 4 - Beta", + + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 
'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'License :: OSI Approved :: MIT License', + ], + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + 'azure.ai', + ]), + install_requires=[ + "azure-core<2.0.0,>=1.8.2", + "msrest>=0.6.21", + 'six>=1.11.0', + 'azure-common~=1.1', + ], + extras_require={ + ":python_version<'3.0'": ['azure-ai-nspkg'], + ":python_version<'3.5'": ['typing'], + }, + project_urls={ + 'Bug Reports': 'https://github.com/Azure/azure-sdk-for-python/issues', + 'Source': 'https://github.com/Azure/azure-sdk-for-python', + } +) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py b/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py new file mode 100644 index 000000000000..fad27bf990b8 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/tests/testcase.py @@ -0,0 +1,22 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +from devtools_testutils import ( + AzureTestCase, +) + +from azure_devtools.scenario_tests import ( + ReplayableTest +) + + +class DocumentTranslationTest(AzureTestCase): + FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key'] + + def __init__(self, method_name): + super(DocumentTranslationTest, self).__init__(method_name) + self.vcr.match_on = ["path", "method", "query"] + diff --git a/sdk/documenttranslation/ci.yml b/sdk/documenttranslation/ci.yml new file mode 100644 index 000000000000..feba1d074712 --- /dev/null +++ b/sdk/documenttranslation/ci.yml @@ -0,0 +1,32 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file. 
+ +trigger: + branches: + include: + - master + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/template/ + +pr: + branches: + include: + - master + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/template/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: documenttranslation + Artifacts: + - name: azure_ai_documenttranslation + safeName: azureaidocumenttranslation \ No newline at end of file diff --git a/sdk/documenttranslation/tests.yml b/sdk/documenttranslation/tests.yml new file mode 100644 index 000000000000..6d13570bfb33 --- /dev/null +++ b/sdk/documenttranslation/tests.yml @@ -0,0 +1,13 @@ +trigger: none + +jobs: + - template: ../../eng/pipelines/templates/jobs/archetype-sdk-tests.yml + parameters: + AllocateResourceGroup: 'false' + BuildTargetingString: azure-ai-documenttranslation + ServiceDirectory: documenttranslation + EnvVars: + AZURE_CLIENT_ID: $(aad-azure-sdk-test-client-id) + AZURE_CLIENT_SECRET: $(aad-azure-sdk-test-client-secret) + AZURE_TENANT_ID: $(aad-azure-sdk-test-tenant-id) + TEST_MODE: 'RunLiveNoRecord' \ No newline at end of file From 64ddb4187fe1842af6c42cf3ef1ca5fe825b46f1 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Mon, 22 Feb 2021 18:24:40 -0800 Subject: [PATCH 2/8] update samples --- .../ai/documenttranslation/_api_version.py | 2 +- .../azure/ai/documenttranslation/_models.py | 6 +- .../samples/sample_azure_storage.py | 122 ++++++++++++++++++ .../samples/sample_batch_translation.py | 26 +++- .../samples/sample_check_statuses.py | 22 +++- .../samples/sample_custom_translation.py | 28 ++-- .../samples/sample_list_batches.py | 2 +- 7 files changed, 181 insertions(+), 27 deletions(-) create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py index 7ef6fa8c09dd..0f162daff872 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_api_version.py @@ -10,7 +10,7 @@ class DocumentTranslationVersion(str, Enum): """Document Translation API versions supported by this package""" #: This is the default version - V1_1_PREVIEW = "1.0-preview.1" + V1_0_PREVIEW = "1.0-preview.1" def validate_api_version(api_version): diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py index 492afc1ab7d4..399289aec026 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -153,8 +153,8 @@ def __init__( class DocumentStatusDetail(object): """DocumentStatusDetail. - :ivar document_url: Required. Location of the document or folder. - :vartype document_url: str + :ivar url: Required. Location of the document or folder. + :vartype url: str :ivar created_on: Required. Operation created date time. :vartype created_on: ~datetime.datetime :ivar last_updated_on: Required. 
Date time in which the operation's status has been @@ -181,7 +181,7 @@ def __init__( **kwargs ): # type: (**Any) -> None - self.document_url = kwargs['document_url'] + self.url = kwargs['url'] self.created_on = kwargs['created_on'] self.last_updated_on = kwargs['last_updated_on'] self.status = kwargs['status'] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py new file mode 100644 index 000000000000..7fc7212fe3ae --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + + +def batch_translation_with_storage(): + import os + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation import ( + DocumentTranslationClient, + BatchDocumentInput, + StorageSourceInput, + StorageTargetInput + ) + from azure.storage.blob import ContainerClient, generate_container_sas, ContainerSasPermissions + + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_storage_endpoint = os.environ["AZURE_STORAGE_SOURCE_ENDPOINT"] + source_storage_account_name = os.environ["AZURE_STORAGE_SOURCE_ACCOUNT_NAME"] + source_storage_container_name = os.environ["AZURE_STORAGE_SOURCE_CONTAINER_NAME"] + source_storage_key = os.environ["AZURE_STORAGE_SOURCE_KEY"] + target_storage_endpoint = os.environ["AZURE_STORAGE_TARGET_ENDPOINT"] + target_storage_account_name = os.environ["AZURE_STORAGE_TARGET_ACCOUNT_NAME"] + target_storage_container_name = os.environ["AZURE_STORAGE_TARGET_CONTAINER_NAME"] + target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"] + + batch_client = DocumentTranslationClient( + endpoint, AzureKeyCredential(key) + ) + + container_client = ContainerClient( + source_storage_endpoint, + container_name=source_storage_container_name, + credential=source_storage_key + ) + + with open("document.txt", "rb") as doc: + container_client.upload_blob("document.txt", doc) + + source_container_sas = generate_container_sas( + account_name=source_storage_account_name, + container_name=source_storage_container_name, + account_key=source_storage_key, + permission=ContainerSasPermissions.from_string("rl") + ) + + target_container_sas = generate_container_sas( + account_name=target_storage_account_name, + container_name=target_storage_container_name, + account_key=target_storage_key, + permission=ContainerSasPermissions.from_string("rlwd") + ) + + source_container_url = source_storage_endpoint + "/" + source_storage_container_name + "?" + source_container_sas + target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas + + batch = [ + BatchDocumentInput( + source=StorageSourceInput( + source_url=source_container_url, + language="en", + prefix="document" + ), + targets=[ + StorageTargetInput( + target_url=target_container_url, + language="es" + ) + ] + ) + ] + + poller = batch_client.begin_batch_translation(batch) + + batch_detail = poller.result() + if batch_detail.status == "Succeeded": + print("We translated our documents!") + if batch_detail.summary.failed > 0: + check_documents(batch_client, batch_detail.id) + + if batch_detail.status in ["Failed", "ValidationFailed"]: + if batch_detail.error: + print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) + check_documents(batch_client, batch_detail.id) + exit(1) + + container_client = ContainerClient( + target_storage_endpoint, + container_name=target_storage_container_name, + credential=target_storage_key + ) + + target_container_client = container_client.from_container_url(target_container_url) + + with open("translated.txt", "wb") as my_blob: + download_stream = target_container_client.download_blob("document.txt") + my_blob.write(download_stream.readall()) + + +def check_documents(client, batch_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container.") + raise err + + docs_to_retry = [] + for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py index 0372f9bd309f..a8a0912fc322 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -51,24 +51,36 @@ def sample_batch_translation(): print("Batch initial status: {}".format(batch_detail.status)) print("Number of translations on documents: {}".format(batch_detail.summary.total)) - batch_result = poller.result() # type: BatchStatusDetail - if batch_result.status == "Succeeded": + batch_detail = poller.result() # type: BatchStatusDetail + if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_result.summary.failed > 0: + if batch_detail.summary.failed > 0: check_documents(client, batch_detail.id) - if batch_result.status == "Failed": - print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + if batch_detail.status in ["Failed", "ValidationFailed"]: + if batch_detail.error: + print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) check_documents(client, batch_detail.id) + exit(1) def check_documents(client, batch_id): - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + except 
ResourceNotFoundError as err: + print("Failed to process any documents in source/target container.") + raise err + + docs_to_retry = [] for document in doc_statuses: if document.status == "Failed": print("Document at {} failed to be translated to {} language".format( - document.document_url, document.translate_to + document.url, document.translate_to )) print("Document ID: {}, Error Code: {}, Message: {}".format( document.id, document.error.code, document.error.message )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py index 9c0093f0efaf..76312472af2f 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -49,12 +49,13 @@ def sample_batch_translation(): while True: batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail - if batch_detail.status == ["NotStarted", "Running"]: + if batch_detail.status in ["NotStarted", "Running"]: time.sleep(5) continue - if batch_detail.status == ["Failed", "ValidationFailed"]: - print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) + if batch_detail.status in ["Failed", "ValidationFailed"]: + if batch_detail.error: + print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) check_documents(client, batch_detail.id) exit(1) @@ -66,15 +67,22 @@ def sample_batch_translation(): def check_documents(client, batch_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container.") + raise err + docs_to_retry = [] - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] for document in doc_statuses: if document.status == "Failed": print("Document at {} failed to be translated to {} language".format( - document.document_url, document.translate_to + document.url, document.translate_to )) print("Document ID: {}, Error Code: {}, Message: {}".format( document.id, document.error.code, document.error.message )) - if document.id not in docs_to_retry: - docs_to_retry.append(document.id) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py index c557033b532f..d1806bbd35c1 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -46,24 +46,36 @@ def sample_custom_translation(): print("Batch initial status: {}".format(batch_detail.status)) print("Number of translations on documents: {}".format(batch_detail.summary.total)) - batch_result = poller.result() # type: BatchStatusDetail - if batch_result.status == "Succeeded": + batch_detail = poller.result() # type: BatchStatusDetail + if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_result.summary.failed > 0: + if batch_detail.summary.failed > 0: 
check_documents(client, batch_detail.id) - if batch_result.status == "Failed": - print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) - check_documents(client, batch_detail.id) + if batch_detail.status in ["Failed", "ValidationFailed"]: + if batch_detail.error: + print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) + check_documents(client, batch_detail.id) + exit(1) def check_documents(client, batch_id): - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container.") + raise err + + docs_to_retry = [] for document in doc_statuses: if document.status == "Failed": print("Document at {} failed to be translated to {} language".format( - document.document_url, document.translate_to + document.url, document.translate_to )) print("Document ID: {}, Error Code: {}, Message: {}".format( document.id, document.error.code, document.error.message )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py index c57fcfafbb99..5138d7bdef3d 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -23,7 +23,7 @@ def sample_list_all_batches(): print("Batch ID: {}".format(batch.id)) print("Batch status: {}".format(batch.status)) print("Batch created on: {}".format(batch.created_on)) - print("Batch last updated on: {}".format(batch.last_updated)) + print("Batch last updated on: {}".format(batch.last_updated_on)) print("Batch number of translations on documents: {}".format(batch.summary.total)) print("Of total documents...") From ee2761db1645d89c5866dacd2221e2ea239f3127 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Tue, 23 Feb 2021 17:07:35 -0800 Subject: [PATCH 3/8] updates from feedback --- .../azure/ai/documenttranslation/__init__.py | 18 +-- .../azure/ai/documenttranslation/_client.py | 40 +++-- .../azure/ai/documenttranslation/_helpers.py | 2 +- .../azure/ai/documenttranslation/_models.py | 137 +++++++----------- .../azure/ai/documenttranslation/_polling.py | 38 +++++ .../documenttranslation/aio/_client_async.py | 41 +++--- .../samples/sample_azure_storage.py | 23 ++- .../samples/sample_batch_translation.py | 26 ++-- .../samples/sample_cancel_batch.py | 24 ++- .../samples/sample_check_statuses.py | 27 ++-- .../samples/sample_custom_translation.py | 31 ++-- .../samples/sample_list_batches.py | 14 +- 12 files changed, 195 insertions(+), 226 deletions(-) create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py index a439cb77b539..8f919cb5a027 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py +++ 
b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py @@ -11,16 +11,15 @@ ) from ._api_version import DocumentTranslationVersion from ._models import ( - StorageSourceInput, - StorageTargetInput, + StorageTarget, BatchStatusDetail, DocumentStatusDetail, DocumentTranslationError, TranslationGlossary, - BatchDocumentInput, - StatusSummary, + BatchTranslationInput, FileFormat ) +from ._polling import DocumentTranslationPoller __VERSION__ = VERSION @@ -28,14 +27,13 @@ __all__ = [ "DocumentTranslationClient", "DocumentTranslationVersion", - "BatchDocumentInput", + "BatchTranslationInput", "TranslationGlossary", "StorageInputType", - "StatusSummary", "FileFormat", - "StorageSourceInput", - "StorageTargetInput", + "StorageTarget", "BatchStatusDetail", "DocumentStatusDetail", - "DocumentTranslationError" -] \ No newline at end of file + "DocumentTranslationError", + "DocumentTranslationPoller" +] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py index 18c52d99c19f..023df10e822c 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py @@ -13,7 +13,7 @@ if TYPE_CHECKING: from azure.core.paging import ItemPaged from azure.core.credentials import AzureKeyCredential, TokenCredential - from ._models import BatchStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat + from ._models import BatchStatusDetail, DocumentStatusDetail, BatchTranslationInput, FileFormat class DocumentTranslationClient(object): @@ -47,11 +47,11 @@ def __init__(self, endpoint, credential, **kwargs): @distributed_trace def begin_batch_translation(self, inputs, **kwargs): - # type: (List[BatchDocumentInput], **Any) -> LROPoller[BatchStatusDetail] + # type: (List[BatchTranslationInput], **Any) -> LROPoller[BatchStatusDetail] """ :param inputs: - :type inputs: List[~azure.ai.documenttranslation.BatchDocumentInput] + :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] :return: LROPoller :rtype: ~azure.core.polling.LROPoller[BatchStatusDetail] """ @@ -64,34 +64,30 @@ def begin_batch_translation(self, inputs, **kwargs): @distributed_trace def get_batch_status(self, batch_id, **kwargs): - # type: (Union[LROPoller, str], **Any) -> BatchStatusDetail + # type: (str, **Any) -> BatchStatusDetail """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.LROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :rtype: ~azure.ai.documenttranslation.BatchStatusDetail """ - if isinstance(batch_id, LROPoller): - batch_id = batch_id.batch_id return self._client.document_translation.get_operation_status(batch_id, **kwargs) @distributed_trace def cancel_batch(self, batch_id, **kwargs): - # type: (Union[LROPoller, str], **Any) -> None + # type: (str, **Any) -> None """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.LROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :rtype: None """ - if isinstance(batch_id, LROPoller): - batch_id = batch_id.batch_id self._client.document_translation.cancel_operation(batch_id, **kwargs) @distributed_trace - def list_batches(self, **kwargs): + def list_statuses_of_batches(self, **kwargs): # type: (**Any) -> 
ItemPaged[BatchStatusDetail] """ @@ -102,28 +98,26 @@ def list_batches(self, **kwargs): return self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_documents_statuses(self, batch_id, **kwargs): - # type: (Union[LROPoller, str], **Any) -> ItemPaged[DocumentStatusDetail] + def list_statuses_of_documents(self, batch_id, **kwargs): + # type: (str, **Any) -> ItemPaged[DocumentStatusDetail] """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.LROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :keyword int results_per_page: :keyword int skip: :rtype: ~azure.core.paging.ItemPaged[DocumentStatusDetail] """ - if isinstance(batch_id, LROPoller): - batch_id = batch_id.batch_id return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) @distributed_trace def get_document_status(self, batch_id, document_id, **kwargs): - # type: (Union[LROPoller, str], str, **Any) -> DocumentStatusDetail + # type: (str, str, **Any) -> DocumentStatusDetail """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.LROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :param document_id: guid id for document :type document_id: str :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py index 059be9359f9d..c9a81ef84b07 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_helpers.py @@ -22,4 +22,4 @@ def get_authentication_policy(credential): raise TypeError("Unsupported credential: {}. Use an instance of AzureKeyCredential " "or a token credential from azure.identity".format(type(credential))) - return authentication_policy \ No newline at end of file + return authentication_policy diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py index 399289aec026..f43bafcc69b8 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -7,37 +7,6 @@ from typing import Any, List -class StorageSourceInput(object): - """Source of the input documents. - - :param source_url: Required. Location of the folder / container or single file with your - documents. - :type source_url: str - :param str language: Language code - If none is specified, we will perform auto detect on the document. - :keyword str prefix: A case-sensitive prefix string to filter documents in the source path for - translation. For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for - translation. - :keyword str suffix: A case-sensitive suffix string to filter documents in the source path for - translation. This is most often use for file extensions. - :ivar storage_source: Storage Source. Default value: "AzureBlob". 
- :vartype storage_source: str - """ - - def __init__( - self, - source_url, - language, - **kwargs - ): - # type: (str, str, **Any) -> None - self.storage_source = "AzureBlob" - self.source_url = source_url - self.language = language - self.prefix = kwargs.get("prefix", None) - self.suffix = kwargs.get("suffix", None) - - class TranslationGlossary(object): """Glossary / translation memory for the request. @@ -47,9 +16,9 @@ class TranslationGlossary(object): If the translation language pair is not present in the glossary, it will not be applied. :type glossary_url: str :keyword str format: Format. - :keyword str version: Version. - :ivar storage_source: Storage Source. Default value: "AzureBlob". - :vartype storage_source: str + :keyword str format_version: Format version. + :keyword storage_source: Storage Source. Default value: "AzureBlob". + :paramtype storage_source: str """ def __init__( @@ -58,24 +27,24 @@ def __init__( **kwargs ): # type: (str, **Any) -> None - self.storage_source = "AzureBlob" self.glossary_url = glossary_url self.format = kwargs.get("format", None) - self.version = kwargs.get("version", None) + self.format_version = kwargs.get("format_version", None) + self.storage_source = kwargs.get("storage_source", None) -class StorageTargetInput(object): +class StorageTarget(object): """Destination for the finished translated documents. :param target_url: Required. Location of the folder / container with your documents. :type target_url: str :param language: Required. Target Language. :type language: str - :keyword str custom_model_id: Category / custom system for translation request. + :keyword str category_id: Category / custom system for translation request. :keyword glossaries: List of TranslationGlossary. :paramtype glossaries: Union[list[str], list[~azure.ai.documenttranslation.TranslationGlossary]] - :ivar storage_source: Storage Source. Default value: "AzureBlob". - :vartype storage_source: str + :keyword storage_source: Storage Source. Default value: "AzureBlob". + :paramtype storage_source: str """ def __init__( @@ -85,35 +54,49 @@ def __init__( **kwargs ): # type: (str, str, **Any) -> None - self.storage_source = "AzureBlob" self.target_url = target_url self.language = language - self.custom_model_id = kwargs.get("custom_model_id", None) + self.category_id = kwargs.get("category_id", None) self.glossaries = kwargs.get("glossaries", None) + self.storage_source = kwargs.get("storage_source", None) -class BatchDocumentInput(object): +class BatchTranslationInput(object): """Definition for the input batch translation request. - :param source: Required. Source of the input documents. - :type source: ~azure.ai.documenttranslation.StorageSourceInput + :param source_url: Required. Location of the folder / container or single file with your + documents. + :type source_url: str + :param str source_language: Language code + If none is specified, we will perform auto detect on the document. :param targets: Required. Location of the destination for the output. - :type targets: list[StorageTargetInput] + :type targets: list[StorageTarget] + :keyword str prefix: A case-sensitive prefix string to filter documents in the source path for + translation. For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for + translation. + :keyword str suffix: A case-sensitive suffix string to filter documents in the source path for + translation. This is most often use for file extensions. :keyword storage_type: Storage type of the input documents source string. 
Possible values include: "Folder", "File". :paramtype storage_type: str or ~azure.ai.documenttranslation.StorageInputType + :keyword str storage_source: Storage Source. Default value: "AzureBlob". """ def __init__( self, - source, + source_url, + source_language, targets, **kwargs ): - # type: (StorageSourceInput, List[StorageTargetInput], **Any) -> None - self.source = source + # type: (str, str, List[StorageTarget], **Any) -> None + self.source_url = source_url + self.source_language = source_language self.targets = targets self.storage_type = kwargs.get("storage_type", None) + self.storage_source = kwargs.get("storage_source", None) + self.prefix = kwargs.get("prefix", None) + self.suffix = kwargs.get("suffix", None) class BatchStatusDetail(object): @@ -130,11 +113,17 @@ class BatchStatusDetail(object): include: "NotStarted", "Running", "Succeeded", "Failed", "Cancelled", "Cancelling", "ValidationFailed". :vartype status: str - :ivar summary: Required. - :vartype summary: ~azure.ai.documenttranslation.StatusSummary :ivar error: This contains an outer error with error code, message, details, target and an inner error with more descriptive details. :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError + :ivar int documents_total_count: Total count. + :ivar int documents_failed_count: Failed count. + :ivar int documents_succeeded_count: Number of Success. + :ivar int documents_in_progress_count: Number of in progress. + :ivar int documents_not_yet_started_count: Count of not yet started. + :ivar int documents_cancelled_count: Number of cancelled. + :ivar int total_characters_charged: Required. Total characters charged by the API. + """ def __init__( @@ -146,8 +135,14 @@ def __init__( self.created_on = kwargs['created_on'] self.last_updated_on = kwargs['last_updated_on'] self.status = kwargs.get('status', None) - self.summary = kwargs['summary'] self.error = kwargs.get("error", None) + self.documents_total_count = kwargs.get('documents_total_count', None) + self.documents_failed_count = kwargs.get('documents_failed_count', None) + self.documents_succeeded_count = kwargs.get('documents_succeeded_count', None) + self.documents_in_progress_count = kwargs.get('documents_in_progress_count', None) + self.documents_not_yet_started_count = kwargs.get('documents_not_yet_started_count', None) + self.documents_cancelled_count = kwargs.get('documents_cancelled_count', None) + self.total_characters_charged = kwargs.get('total_characters_charged', None) class DocumentStatusDetail(object): @@ -169,8 +164,8 @@ class DocumentStatusDetail(object): :ivar error: This contains an outer error with error code, message, details, target and an inner error with more descriptive details. :vartype error: ~azure.ai.documenttranslation.DocumentTranslationError - :ivar progress: Progress of the translation if available. - :vartype progress: float + :ivar translation_progress: Progress of the translation if available. + :vartype translation_progress: float :ivar id: Document Id. :vartype id: str :ivar int characters_charged: Character charged by the API. 
@@ -187,7 +182,7 @@ def __init__( self.status = kwargs['status'] self.translate_to = kwargs['translate_to'] self.error = kwargs.get('error', None) - self.progress = kwargs.get('progress', None) + self.translation_progress = kwargs.get('translation_progress', None) self.id = kwargs.get('id', None) self.characters_charged = kwargs.get('characters_charged', None) @@ -217,38 +212,6 @@ def __init__( self.target = None -class StatusSummary(object): - """StatusSummary. - - :ivar total: Total count. - :vartype total: int - :ivar failed: Failed count. - :vartype failed: int - :ivar succeeded: Number of Success. - :vartype succeeded: int - :ivar in_progress: Number of in progress. - :vartype in_progress: int - :ivar not_yet_started: Count of not yet started. - :vartype not_yet_started: int - :ivar cancelled: Number of cancelled. - :vartype cancelled: int - :ivar int total_characters_charged: Required. Total characters charged by the API. - """ - - def __init__( - self, - **kwargs - ): - # type: (**Any) -> None - self.total = kwargs.get('total', None) - self.failed = kwargs.get('failed', None) - self.succeeded = kwargs.get('succeeded', None) - self.in_progress = kwargs.get('in_progress', None) - self.not_yet_started = kwargs.get('not_yet_started', None) - self.cancelled = kwargs.get('cancelled', None) - self.total_characters_charged = kwargs.get('total_characters_charged', None) - - class FileFormat(object): """FileFormat. diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py new file mode 100644 index 000000000000..f3c29e7dfa73 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py @@ -0,0 +1,38 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +import logging +from typing import TYPE_CHECKING, TypeVar +from azure.core.polling import LROPoller, PollingMethod +if TYPE_CHECKING: + from typing import Any + + +PollingReturnType = TypeVar("PollingReturnType") + +_LOGGER = logging.getLogger(__name__) + + +class DocumentTranslationPoller(LROPoller): + # TODO - this is temporary class. 
we will generate with the custom poller + + @property + def batch_id(self): + return self._polling_method._operation._async_url.split("/batches/")[1] + + @classmethod + def from_continuation_token(cls, polling_method, continuation_token, **kwargs): + # type: (PollingMethod[PollingReturnType], str, **Any) -> DocumentTranslationPoller[PollingReturnType] + """ + :param polling_method: + :type polling_method: ~azure.core.polling.PollingMethod + :param str continuation_token: + :return: DocumentTranslationPoller + """ + client, initial_response, deserialization_callback = polling_method.from_continuation_token( + continuation_token, **kwargs + ) + return cls(client, initial_response, deserialization_callback, polling_method) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py index 19f58c9c8b6b..7246fddc36a4 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py @@ -11,7 +11,7 @@ from azure.core.async_paging import AsyncItemPaged from .._generated.aio import BatchDocumentTranslationClient as _BatchDocumentTranslationClient from .._user_agent import USER_AGENT -from .._models import BatchStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat +from .._models import BatchStatusDetail, DocumentStatusDetail, BatchTranslationInput, FileFormat from .._helpers import get_authentication_policy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -51,11 +51,11 @@ def __init__( @distributed_trace_async async def begin_batch_translation(self, inputs, **kwargs): - # type: (List[BatchDocumentInput], **Any) -> AsyncLROPoller[BatchStatusDetail] + # type: (List[BatchTranslationInput], **Any) -> AsyncLROPoller[BatchStatusDetail] """ :param inputs: - :type inputs: List[~azure.ai.documenttranslation.BatchDocumentInput] + :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] :rtype: ~azure.core.polling.AsyncLROPoller[BatchStatusDetail] """ return await self._client.document_translation.begin_submit_batch_request( @@ -66,34 +66,29 @@ async def begin_batch_translation(self, inputs, **kwargs): @distributed_trace_async async def get_batch_status(self, batch_id, **kwargs): - # type: (Union[AsyncLROPoller, str], **Any) -> BatchStatusDetail + # type: (str, **Any) -> BatchStatusDetail """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :rtype: ~azure.ai.documenttranslation.BatchStatusDetail """ - if isinstance(batch_id, AsyncLROPoller): - batch_id = batch_id.batch_id - return await self._client.document_translation.get_operation_status(batch_id, **kwargs) @distributed_trace_async async def cancel_batch(self, batch_id, **kwargs): - # type: (Union[AsyncLROPoller, str], **Any) -> None + # type: (str, **Any) -> None """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :rtype: None """ - if isinstance(batch_id, AsyncLROPoller): - batch_id = batch_id.batch_id await self._client.document_translation.cancel_operation(batch_id, **kwargs) @distributed_trace - def list_batches(self, 
**kwargs): + def list_statuses_of_batches(self, **kwargs): # type: (**Any) -> AsyncItemPaged[BatchStatusDetail] """ @@ -104,28 +99,26 @@ def list_batches(self, **kwargs): return self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_documents_statuses(self, batch_id, **kwargs): - # type: (Union[AsyncLROPoller, str], **Any) -> AsyncItemPaged[DocumentStatusDetail] + def list_statuses_of_documents(self, batch_id, **kwargs): + # type: (str, **Any) -> AsyncItemPaged[DocumentStatusDetail] """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :keyword int results_per_page: :keyword int skip: :rtype: ~azure.core.async_paging.AsyncItemPaged[DocumentStatusDetail] """ - if isinstance(batch_id, AsyncLROPoller): - batch_id = batch_id.batch_id return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) @distributed_trace_async async def get_document_status(self, batch_id, document_id, **kwargs): - # type: (Union[AsyncLROPoller, str], str, **Any) -> DocumentStatusDetail + # type: (str, str, **Any) -> DocumentStatusDetail """ - :param batch_id: guid id for batch or poller object - :type batch_id: Union[~azure.core.polling.AsyncLROPoller, str] + :param batch_id: guid id for batch + :type batch_id: str :param document_id: guid id for document :type document_id: str :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py index 7fc7212fe3ae..01cf57a3c76a 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py @@ -10,9 +10,8 @@ def batch_translation_with_storage(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchDocumentInput, - StorageSourceInput, - StorageTargetInput + BatchTranslationInput, + StorageTarget ) from azure.storage.blob import ContainerClient, generate_container_sas, ContainerSasPermissions @@ -58,18 +57,16 @@ def batch_translation_with_storage(): target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas batch = [ - BatchDocumentInput( - source=StorageSourceInput( - source_url=source_container_url, - language="en", - prefix="document" - ), + BatchTranslationInput( + source_url=source_container_url, + source_language="en", targets=[ - StorageTargetInput( + StorageTarget( target_url=target_container_url, language="es" ) - ] + ], + prefix="document" ) ] @@ -78,7 +75,7 @@ def batch_translation_with_storage(): batch_detail = poller.result() if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_detail.summary.failed > 0: + if batch_detail.documents_failed_count > 0: check_documents(batch_client, batch_detail.id) if batch_detail.status in ["Failed", "ValidationFailed"]: @@ -104,7 +101,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py index a8a0912fc322..9e2c6c04abf4 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -10,9 +10,8 @@ def sample_batch_translation(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchDocumentInput, - StorageSourceInput, - StorageTargetInput + BatchTranslationInput, + StorageTarget ) endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] @@ -24,18 +23,15 @@ def sample_batch_translation(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchDocumentInput( - source=StorageSourceInput( - source_url=source_container_url, - language="en", - prefix="document_2021" - ), + BatchTranslationInput( + source_url=source_container_url, + source_language="en", targets=[ - StorageTargetInput( + StorageTarget( target_url=target_container_url_es, language="es" ), - StorageTargetInput( + StorageTarget( target_url=target_container_url_fr, language="fr" ) @@ -46,15 +42,15 @@ def sample_batch_translation(): poller = client.begin_batch_translation(batch) - batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail print("Batch initial status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.summary.total)) + print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) batch_detail = poller.result() # type: BatchStatusDetail if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_detail.summary.failed > 0: + if batch_detail.documents_failed_count > 0: check_documents(client, batch_detail.id) if batch_detail.status in ["Failed", "ValidationFailed"]: @@ -68,7 +64,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = 
client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py index fdb56a2856f7..a53f1d0c0934 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py @@ -10,9 +10,8 @@ def sample_cancel_batch(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchDocumentInput, - StorageSourceInput, - StorageTargetInput + BatchTranslationInput, + StorageTarget ) endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] @@ -23,14 +22,11 @@ def sample_cancel_batch(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchDocumentInput( - source=StorageSourceInput( - source_url=source_container_url, - language="en", - prefix="document_2021" - ), + BatchTranslationInput( + source_url=source_container_url, + source_language="en", targets=[ - StorageTargetInput( + StorageTarget( target_url=target_container_url_es, language="es" ) @@ -41,13 +37,13 @@ def sample_cancel_batch(): poller = client.begin_batch_translation(batch) - batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail print("Batch status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.summary.total)) + print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) - client.cancel_batch(poller) - detail = client.get_batch_status(poller) # type: BatchStatusDetail + client.cancel_batch(batch_detail.id) + detail = client.get_batch_status(batch_detail.id) # type: BatchStatusDetail if detail.status in ["Cancelled", "Cancelling"]: print("We cancelled batch with ID: {}".format(detail.id)) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py index 76312472af2f..1349975f9417 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -11,9 +11,8 @@ def sample_batch_translation(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchDocumentInput, - StorageSourceInput, - StorageTargetInput + BatchTranslationInput, + StorageTarget ) endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] @@ -25,30 +24,28 @@ def sample_batch_translation(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchDocumentInput( - source=StorageSourceInput( - source_url=source_container_url, - language="en", - prefix="document_2021" - ), + BatchTranslationInput( + source_url=source_container_url, + source_language="en", targets=[ - StorageTargetInput( + StorageTarget( target_url=target_container_url_es, language="es" ), - StorageTargetInput( + StorageTarget( target_url=target_container_url_fr, language="fr" ) ], - storage_type="folder" + storage_type="folder", + 
prefix="document_2021" ) ] poller = client.begin_batch_translation(batch) while True: - batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail if batch_detail.status in ["NotStarted", "Running"]: time.sleep(5) continue @@ -61,7 +58,7 @@ def sample_batch_translation(): if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_detail.summary.failed > 0: + if batch_detail.documents_failed_count > 0: check_documents(client, batch_detail.id) break @@ -70,7 +67,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py index d1806bbd35c1..e1625af2eec5 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -10,46 +10,43 @@ def sample_custom_translation(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchDocumentInput, - StorageSourceInput, - StorageTargetInput + BatchTranslationInput, + StorageTarget ) endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] - custom_model_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] + category_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchDocumentInput( - source=StorageSourceInput( - source_url=source_container_url, - language="en", - prefix="document_2021" - ), + BatchTranslationInput( + source_url=source_container_url, + source_language="en", targets=[ - StorageTargetInput( + StorageTarget( target_url=target_container_url_fr, language="fr", - custom_model_id=custom_model_id + category_id=category_id ) - ] + ], + prefix="document_2021" ) ] poller = client.begin_batch_translation(batch) - batch_detail = client.get_batch_status(poller) # type: BatchStatusDetail + batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail print("Batch initial status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.summary.total)) + print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) batch_detail = poller.result() # type: BatchStatusDetail if batch_detail.status == "Succeeded": print("We translated our documents!") - if batch_detail.summary.failed > 0: + if batch_detail.documents_failed_count > 0: check_documents(client, batch_detail.id) if batch_detail.status in ["Failed", "ValidationFailed"]: @@ -63,7 +60,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: 
ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py index 5138d7bdef3d..5e0147d00718 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -16,7 +16,7 @@ def sample_list_all_batches(): key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) - batches = client.list_batches() + batches = client.list_statuses_of_batches() print("Batches summary") for batch in batches: @@ -24,11 +24,11 @@ def sample_list_all_batches(): print("Batch status: {}".format(batch.status)) print("Batch created on: {}".format(batch.created_on)) print("Batch last updated on: {}".format(batch.last_updated_on)) - print("Batch number of translations on documents: {}".format(batch.summary.total)) + print("Batch number of translations on documents: {}".format(batch.documents_total_count)) print("Of total documents...") - print("{} failed".format(batch.summary.failed)) - print("{} succeeded".format(batch.summary.succeeded)) - print("{} in progress".format(batch.summary.in_progress)) - print("{} not yet started".format(batch.summary.not_yet_started)) - print("{} cancelled".format(batch.summary.cancelled)) + print("{} failed".format(batch.documents_failed_count)) + print("{} succeeded".format(batch.documents_succeeded_count)) + print("{} in progress".format(batch.documents_in_progress_count)) + print("{} not yet started".format(batch.documents_not_yet_started_count)) + print("{} cancelled".format(batch.documents_cancelled_count)) From 7bd3bb4241aa149a5f8afebf8fbee7f1afb9d034 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Wed, 24 Feb 2021 16:08:22 -0800 Subject: [PATCH 4/8] johans feedback --- .../azure/ai/documenttranslation/__init__.py | 2 - .../azure/ai/documenttranslation/_client.py | 24 +++++++++--- .../azure/ai/documenttranslation/_polling.py | 38 ------------------- .../documenttranslation/aio/_client_async.py | 22 ++++++++--- .../samples/sample_azure_storage.py | 20 +++++----- .../samples/sample_batch_translation.py | 22 +++++------ .../samples/sample_cancel_batch.py | 4 +- .../samples/sample_check_statuses.py | 6 +-- .../samples/sample_custom_translation.py | 21 +++++----- .../samples/sample_list_batches.py | 2 +- 10 files changed, 70 insertions(+), 91 deletions(-) delete mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py index 8f919cb5a027..b6ff229e244a 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py @@ -19,7 +19,6 @@ BatchTranslationInput, FileFormat ) -from ._polling import DocumentTranslationPoller __VERSION__ = VERSION @@ -35,5 +34,4 @@ "BatchStatusDetail", "DocumentStatusDetail", "DocumentTranslationError", - 
"DocumentTranslationPoller" ] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py index 023df10e822c..131f25ffc144 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py @@ -46,14 +46,14 @@ def __init__(self, endpoint, credential, **kwargs): ) @distributed_trace - def begin_batch_translation(self, inputs, **kwargs): - # type: (List[BatchTranslationInput], **Any) -> LROPoller[BatchStatusDetail] + def create_batch(self, inputs, **kwargs): + # type: (List[BatchTranslationInput], **Any) -> BatchStatusDetail """ :param inputs: :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] - :return: LROPoller - :rtype: ~azure.core.polling.LROPoller[BatchStatusDetail] + :return: BatchStatusDetail + :rtype: BatchStatusDetail """ return self._client.document_translation.begin_submit_batch_request( @@ -87,7 +87,19 @@ def cancel_batch(self, batch_id, **kwargs): self._client.document_translation.cancel_operation(batch_id, **kwargs) @distributed_trace - def list_statuses_of_batches(self, **kwargs): + def wait_until_done(self, batch_id, **kwargs): + # type: (str, **Any) -> BatchStatusDetail + """ + + :param batch_id: guid id for batch + :type batch_id: str + :return: BatchStatusDetail + :rtype: BatchStatusDetail + """ + pass + + @distributed_trace + def list_batches_statuses(self, **kwargs): # type: (**Any) -> ItemPaged[BatchStatusDetail] """ @@ -98,7 +110,7 @@ def list_statuses_of_batches(self, **kwargs): return self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_statuses_of_documents(self, batch_id, **kwargs): + def list_documents_statuses(self, batch_id, **kwargs): # type: (str, **Any) -> ItemPaged[DocumentStatusDetail] """ diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py deleted file mode 100644 index f3c29e7dfa73..000000000000 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_polling.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ - -import logging -from typing import TYPE_CHECKING, TypeVar -from azure.core.polling import LROPoller, PollingMethod -if TYPE_CHECKING: - from typing import Any - - -PollingReturnType = TypeVar("PollingReturnType") - -_LOGGER = logging.getLogger(__name__) - - -class DocumentTranslationPoller(LROPoller): - # TODO - this is temporary class. 
we will generate with the custom poller - - @property - def batch_id(self): - return self._polling_method._operation._async_url.split("/batches/")[1] - - @classmethod - def from_continuation_token(cls, polling_method, continuation_token, **kwargs): - # type: (PollingMethod[PollingReturnType], str, **Any) -> DocumentTranslationPoller[PollingReturnType] - """ - :param polling_method: - :type polling_method: ~azure.core.polling.PollingMethod - :param str continuation_token: - :return: DocumentTranslationPoller - """ - client, initial_response, deserialization_callback = polling_method.from_continuation_token( - continuation_token, **kwargs - ) - return cls(client, initial_response, deserialization_callback, polling_method) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py index 7246fddc36a4..39745494b7f1 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py @@ -50,13 +50,13 @@ def __init__( ) @distributed_trace_async - async def begin_batch_translation(self, inputs, **kwargs): - # type: (List[BatchTranslationInput], **Any) -> AsyncLROPoller[BatchStatusDetail] + async def create_batch(self, inputs, **kwargs): + # type: (List[BatchTranslationInput], **Any) -> BatchStatusDetail """ :param inputs: :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] - :rtype: ~azure.core.polling.AsyncLROPoller[BatchStatusDetail] + :rtype: BatchStatusDetail """ return await self._client.document_translation.begin_submit_batch_request( inputs=inputs, @@ -75,6 +75,18 @@ async def get_batch_status(self, batch_id, **kwargs): """ return await self._client.document_translation.get_operation_status(batch_id, **kwargs) + @distributed_trace_async + async def wait_until_done(self, batch_id, **kwargs): + # type: (str, **Any) -> BatchStatusDetail + """ + + :param batch_id: guid id for batch + :type batch_id: str + :return: BatchStatusDetail + :rtype: BatchStatusDetail + """ + pass + @distributed_trace_async async def cancel_batch(self, batch_id, **kwargs): # type: (str, **Any) -> None @@ -88,7 +100,7 @@ async def cancel_batch(self, batch_id, **kwargs): await self._client.document_translation.cancel_operation(batch_id, **kwargs) @distributed_trace - def list_statuses_of_batches(self, **kwargs): + def list_batches_statuses(self, **kwargs): # type: (**Any) -> AsyncItemPaged[BatchStatusDetail] """ @@ -99,7 +111,7 @@ def list_statuses_of_batches(self, **kwargs): return self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_statuses_of_documents(self, batch_id, **kwargs): + def list_documents_statuses(self, batch_id, **kwargs): # type: (str, **Any) -> AsyncItemPaged[DocumentStatusDetail] """ diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py index 01cf57a3c76a..65886cb83258 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py @@ -70,18 +70,18 @@ def batch_translation_with_storage(): ) ] - poller = batch_client.begin_batch_translation(batch) + batch_detail = 
batch_client.create_batch(batch) + batch_result = batch_client.wait_until_done(batch_detail.id) - batch_detail = poller.result() - if batch_detail.status == "Succeeded": + if batch_result.status == "Succeeded": print("We translated our documents!") - if batch_detail.documents_failed_count > 0: - check_documents(batch_client, batch_detail.id) + if batch_result.documents_failed_count > 0: + check_documents(batch_client, batch_result.id) - if batch_detail.status in ["Failed", "ValidationFailed"]: - if batch_detail.error: - print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) - check_documents(batch_client, batch_detail.id) + if batch_result.status in ["Failed", "ValidationFailed"]: + if batch_result.error: + print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + check_documents(batch_client, batch_result.id) exit(1) container_client = ContainerClient( @@ -101,7 +101,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py index 9e2c6c04abf4..740d68bf1b25 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -40,23 +40,21 @@ def sample_batch_translation(): ) ] - poller = client.begin_batch_translation(batch) - - batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail + batch_detail = client.create_batch(batch) # type: BatchStatusDetail print("Batch initial status: {}".format(batch_detail.status)) print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) - batch_detail = poller.result() # type: BatchStatusDetail - if batch_detail.status == "Succeeded": + batch_result = client.wait_until_done(batch_detail.id) # type: BatchStatusDetail + if batch_result.status == "Succeeded": print("We translated our documents!") - if batch_detail.documents_failed_count > 0: - check_documents(client, batch_detail.id) + if batch_result.documents_failed_count > 0: + check_documents(client, batch_result.id) - if batch_detail.status in ["Failed", "ValidationFailed"]: - if batch_detail.error: - print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) - check_documents(client, batch_detail.id) + if batch_result.status in ["Failed", "ValidationFailed"]: + if batch_result.error: + print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + check_documents(client, batch_result.id) exit(1) @@ -64,7 +62,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git 
a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py index a53f1d0c0934..cd97ebc878d3 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py @@ -35,9 +35,7 @@ def sample_cancel_batch(): ) ] - poller = client.begin_batch_translation(batch) - - batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail + batch_detail = client.create_batch(batch) # type: BatchStatusDetail print("Batch status: {}".format(batch_detail.status)) print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py index 1349975f9417..c3d97b209953 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -42,10 +42,10 @@ def sample_batch_translation(): ) ] - poller = client.begin_batch_translation(batch) + batch_detail = client.create_batch(batch) while True: - batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail + batch_detail = client.get_batch_status(batch_detail.id) # type: BatchStatusDetail if batch_detail.status in ["NotStarted", "Running"]: time.sleep(5) continue @@ -67,7 +67,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py index e1625af2eec5..745a9f46a548 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -37,22 +37,21 @@ def sample_custom_translation(): ) ] - poller = client.begin_batch_translation(batch) - batch_detail = client.get_batch_status(poller.batch_id) # type: BatchStatusDetail + batch_detail = client.create_batch(batch) # type: BatchStatusDetail print("Batch initial status: {}".format(batch_detail.status)) print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) - batch_detail = poller.result() # type: BatchStatusDetail - if batch_detail.status == "Succeeded": + batch_result = client.wait_until_done(batch_detail.id) # type: BatchStatusDetail + if batch_result.status == "Succeeded": print("We translated our documents!") - if batch_detail.documents_failed_count > 0: - check_documents(client, batch_detail.id) + if batch_result.documents_failed_count > 0: + check_documents(client, batch_result.id) - if batch_detail.status in ["Failed", "ValidationFailed"]: - if batch_detail.error: - print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) - check_documents(client, batch_detail.id) + if 
batch_result.status in ["Failed", "ValidationFailed"]: + if batch_result.error: + print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) + check_documents(client, batch_result.id) exit(1) @@ -60,7 +59,7 @@ def check_documents(client, batch_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_statuses_of_documents(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py index 5e0147d00718..0abb8a57a468 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -16,7 +16,7 @@ def sample_list_all_batches(): key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) - batches = client.list_statuses_of_batches() + batches = client.list_batches_statuses() print("Batches summary") for batch in batches: From 0a4d34c2bb34c162b72be777f9ecb63e441576c5 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Wed, 24 Feb 2021 18:23:29 -0800 Subject: [PATCH 5/8] renaming to use job terminology --- .../azure/ai/documenttranslation/__init__.py | 8 +- .../azure/ai/documenttranslation/_client.py | 71 ++++++++-------- .../azure/ai/documenttranslation/_models.py | 6 +- .../documenttranslation/aio/_client_async.py | 82 ++++++++++--------- .../samples/sample_azure_storage.py | 28 +++---- .../samples/sample_batch_translation.py | 30 +++---- ...ch.py => sample_cancel_translation_job.py} | 18 ++-- .../samples/sample_check_statuses.py | 30 +++---- .../samples/sample_custom_translation.py | 30 +++---- .../samples/sample_list_batches.py | 27 +++--- 10 files changed, 165 insertions(+), 165 deletions(-) rename sdk/documenttranslation/azure-ai-documenttranslation/samples/{sample_cancel_batch.py => sample_cancel_translation_job.py} (66%) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py index b6ff229e244a..6aa71f6e8164 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/__init__.py @@ -12,11 +12,11 @@ from ._api_version import DocumentTranslationVersion from ._models import ( StorageTarget, - BatchStatusDetail, + JobStatusDetail, DocumentStatusDetail, DocumentTranslationError, TranslationGlossary, - BatchTranslationInput, + BatchDocumentInput, FileFormat ) @@ -26,12 +26,12 @@ __all__ = [ "DocumentTranslationClient", "DocumentTranslationVersion", - "BatchTranslationInput", + "BatchDocumentInput", "TranslationGlossary", "StorageInputType", "FileFormat", "StorageTarget", - "BatchStatusDetail", + "JobStatusDetail", "DocumentStatusDetail", "DocumentTranslationError", ] diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py index 
131f25ffc144..e48ec7f1148b 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_client.py @@ -5,7 +5,6 @@ # ------------------------------------ from typing import Union, Any, TYPE_CHECKING, List -from azure.core.polling import LROPoller from azure.core.tracing.decorator import distributed_trace from ._generated import BatchDocumentTranslationClient as _BatchDocumentTranslationClient from ._helpers import get_authentication_policy @@ -13,7 +12,7 @@ if TYPE_CHECKING: from azure.core.paging import ItemPaged from azure.core.credentials import AzureKeyCredential, TokenCredential - from ._models import BatchStatusDetail, DocumentStatusDetail, BatchTranslationInput, FileFormat + from ._models import JobStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat class DocumentTranslationClient(object): @@ -46,95 +45,95 @@ def __init__(self, endpoint, credential, **kwargs): ) @distributed_trace - def create_batch(self, inputs, **kwargs): - # type: (List[BatchTranslationInput], **Any) -> BatchStatusDetail + def create_translation_job(self, batch, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> JobStatusDetail """ - :param inputs: - :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] - :return: BatchStatusDetail - :rtype: BatchStatusDetail + :param batch: + :type batch: List[~azure.ai.documenttranslation.BatchDocumentInput] + :return: JobStatusDetail + :rtype: JobStatusDetail """ return self._client.document_translation.begin_submit_batch_request( - inputs=inputs, + inputs=batch, polling=True, **kwargs ) @distributed_trace - def get_batch_status(self, batch_id, **kwargs): - # type: (str, **Any) -> BatchStatusDetail + def get_job_status(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str - :rtype: ~azure.ai.documenttranslation.BatchStatusDetail + :param job_id: guid id for job + :type job_id: str + :rtype: ~azure.ai.documenttranslation.JobStatusDetail """ - return self._client.document_translation.get_operation_status(batch_id, **kwargs) + return self._client.document_translation.get_operation_status(job_id, **kwargs) @distributed_trace - def cancel_batch(self, batch_id, **kwargs): + def cancel_job(self, job_id, **kwargs): # type: (str, **Any) -> None """ - :param batch_id: guid id for batch - :type batch_id: str + :param job_id: guid id for job + :type job_id: str :rtype: None """ - self._client.document_translation.cancel_operation(batch_id, **kwargs) + self._client.document_translation.cancel_operation(job_id, **kwargs) @distributed_trace - def wait_until_done(self, batch_id, **kwargs): - # type: (str, **Any) -> BatchStatusDetail + def wait_until_done(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str - :return: BatchStatusDetail - :rtype: BatchStatusDetail + :param job_id: guid id for job + :type job_id: str + :return: JobStatusDetail + :rtype: JobStatusDetail """ pass @distributed_trace - def list_batches_statuses(self, **kwargs): - # type: (**Any) -> ItemPaged[BatchStatusDetail] + def list_submitted_jobs(self, **kwargs): + # type: (**Any) -> ItemPaged[JobStatusDetail] """ :keyword int results_per_page: :keyword int skip: - :rtype: ~azure.core.polling.ItemPaged[BatchStatusDetail] + :rtype: ~azure.core.polling.ItemPaged[JobStatusDetail] """ return 
self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_documents_statuses(self, batch_id, **kwargs): + def list_documents_statuses(self, job_id, **kwargs): # type: (str, **Any) -> ItemPaged[DocumentStatusDetail] """ - :param batch_id: guid id for batch - :type batch_id: str + :param job_id: guid id for job + :type job_id: str :keyword int results_per_page: :keyword int skip: :rtype: ~azure.core.paging.ItemPaged[DocumentStatusDetail] """ - return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) + return self._client.document_translation.get_operation_documents_status(job_id, **kwargs) @distributed_trace - def get_document_status(self, batch_id, document_id, **kwargs): + def get_document_status(self, job_id, document_id, **kwargs): # type: (str, str, **Any) -> DocumentStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str + :param job_id: guid id for job + :type job_id: str :param document_id: guid id for document :type document_id: str :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail """ - return self._client.document_translation.get_document_status(batch_id, document_id, **kwargs) + return self._client.document_translation.get_document_status(job_id, document_id, **kwargs) @distributed_trace def get_supported_storage_sources(self, **kwargs): diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py index f43bafcc69b8..00b0f0d9a137 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -61,7 +61,7 @@ def __init__( self.storage_source = kwargs.get("storage_source", None) -class BatchTranslationInput(object): +class BatchDocumentInput(object): """Definition for the input batch translation request. :param source_url: Required. Location of the folder / container or single file with your @@ -99,10 +99,10 @@ def __init__( self.suffix = kwargs.get("suffix", None) -class BatchStatusDetail(object): +class JobStatusDetail(object): """Job status response. - :ivar id: Required. Id of the operation. + :ivar id: Required. Id of the job. :vartype id: str :ivar created_on: Required. Operation created date time. 
:vartype created_on: ~datetime.datetime diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py index 39745494b7f1..eaededf5f0ee 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/aio/_client_async.py @@ -5,13 +5,12 @@ # ------------------------------------ from typing import Union, Any, List, TYPE_CHECKING -from azure.core.polling import AsyncLROPoller from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.tracing.decorator import distributed_trace from azure.core.async_paging import AsyncItemPaged from .._generated.aio import BatchDocumentTranslationClient as _BatchDocumentTranslationClient from .._user_agent import USER_AGENT -from .._models import BatchStatusDetail, DocumentStatusDetail, BatchTranslationInput, FileFormat +from .._models import JobStatusDetail, DocumentStatusDetail, BatchDocumentInput, FileFormat from .._helpers import get_authentication_policy if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -50,92 +49,95 @@ def __init__( ) @distributed_trace_async - async def create_batch(self, inputs, **kwargs): - # type: (List[BatchTranslationInput], **Any) -> BatchStatusDetail + async def create_translation_job(self, batch, **kwargs): + # type: (List[BatchDocumentInput], **Any) -> JobStatusDetail """ - :param inputs: - :type inputs: List[~azure.ai.documenttranslation.BatchTranslationInput] - :rtype: BatchStatusDetail + :param batch: + :type batch: List[~azure.ai.documenttranslation.BatchDocumentInput] + :return: JobStatusDetail + :rtype: JobStatusDetail """ + return await self._client.document_translation.begin_submit_batch_request( - inputs=inputs, + inputs=batch, polling=True, **kwargs ) @distributed_trace_async - async def get_batch_status(self, batch_id, **kwargs): - # type: (str, **Any) -> BatchStatusDetail + async def get_job_status(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str - :rtype: ~azure.ai.documenttranslation.BatchStatusDetail + :param job_id: guid id for job + :type job_id: str + :rtype: ~azure.ai.documenttranslation.JobStatusDetail """ - return await self._client.document_translation.get_operation_status(batch_id, **kwargs) + + return await self._client.document_translation.get_operation_status(job_id, **kwargs) @distributed_trace_async - async def wait_until_done(self, batch_id, **kwargs): - # type: (str, **Any) -> BatchStatusDetail + async def cancel_job(self, job_id, **kwargs): + # type: (str, **Any) -> None """ - :param batch_id: guid id for batch - :type batch_id: str - :return: BatchStatusDetail - :rtype: BatchStatusDetail + :param job_id: guid id for job + :type job_id: str + :rtype: None """ - pass + + await self._client.document_translation.cancel_operation(job_id, **kwargs) @distributed_trace_async - async def cancel_batch(self, batch_id, **kwargs): - # type: (str, **Any) -> None + async def wait_until_done(self, job_id, **kwargs): + # type: (str, **Any) -> JobStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str - :rtype: None + :param job_id: guid id for job + :type job_id: str + :return: JobStatusDetail + :rtype: JobStatusDetail """ - - await 
self._client.document_translation.cancel_operation(batch_id, **kwargs) + pass @distributed_trace - def list_batches_statuses(self, **kwargs): - # type: (**Any) -> AsyncItemPaged[BatchStatusDetail] + def list_submitted_jobs(self, **kwargs): + # type: (**Any) -> AsyncItemPaged[JobStatusDetail] """ :keyword int results_per_page: :keyword int skip: - :rtype: ~azure.core.async_paging.AsyncItemPaged[BatchStatusDetail] + :rtype: ~azure.core.polling.AsyncItemPaged[JobStatusDetail] """ return self._client.document_translation.get_operations(**kwargs) @distributed_trace - def list_documents_statuses(self, batch_id, **kwargs): + def list_documents_statuses(self, job_id, **kwargs): # type: (str, **Any) -> AsyncItemPaged[DocumentStatusDetail] """ - :param batch_id: guid id for batch - :type batch_id: str + :param job_id: guid id for job + :type job_id: str :keyword int results_per_page: :keyword int skip: - :rtype: ~azure.core.async_paging.AsyncItemPaged[DocumentStatusDetail] + :rtype: ~azure.core.paging.AsyncItemPaged[DocumentStatusDetail] """ - return self._client.document_translation.get_operation_documents_status(batch_id, **kwargs) + return self._client.document_translation.get_operation_documents_status(job_id, **kwargs) @distributed_trace_async - async def get_document_status(self, batch_id, document_id, **kwargs): + async def get_document_status(self, job_id, document_id, **kwargs): # type: (str, str, **Any) -> DocumentStatusDetail """ - :param batch_id: guid id for batch - :type batch_id: str + :param job_id: guid id for job + :type job_id: str :param document_id: guid id for document :type document_id: str :rtype: ~azure.ai.documenttranslation.DocumentStatusDetail """ - return await self._client.document_translation.get_document_status(batch_id, document_id, **kwargs) + return await self._client.document_translation.get_document_status(job_id, document_id, **kwargs) @distributed_trace_async async def get_supported_storage_sources(self, **kwargs): diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py index 65886cb83258..6501fa7b4d06 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py @@ -10,7 +10,7 @@ def batch_translation_with_storage(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchTranslationInput, + BatchDocumentInput, StorageTarget ) from azure.storage.blob import ContainerClient, generate_container_sas, ContainerSasPermissions @@ -26,7 +26,7 @@ def batch_translation_with_storage(): target_storage_container_name = os.environ["AZURE_STORAGE_TARGET_CONTAINER_NAME"] target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"] - batch_client = DocumentTranslationClient( + translation_client = DocumentTranslationClient( endpoint, AzureKeyCredential(key) ) @@ -57,7 +57,7 @@ def batch_translation_with_storage(): target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas batch = [ - BatchTranslationInput( + BatchDocumentInput( source_url=source_container_url, source_language="en", targets=[ @@ -70,18 +70,18 @@ def batch_translation_with_storage(): ) ] - batch_detail = batch_client.create_batch(batch) - batch_result = batch_client.wait_until_done(batch_detail.id) + job_detail = translation_client.create_translation_job(batch) + job_result = translation_client.wait_until_done(job_detail.id) - if batch_result.status == "Succeeded": + if job_result.status == "Succeeded": print("We translated our documents!") - if batch_result.documents_failed_count > 0: - check_documents(batch_client, batch_result.id) + if job_result.documents_failed_count > 0: + check_documents(translation_client, job_result.id) - if batch_result.status in ["Failed", "ValidationFailed"]: - if batch_result.error: - print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) - check_documents(batch_client, batch_result.id) + if job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(translation_client, job_result.id) exit(1) container_client = ContainerClient( @@ -97,11 +97,11 @@ def batch_translation_with_storage(): my_blob.write(download_stream.readall()) -def check_documents(client, batch_id): +def check_documents(client, job_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py index 740d68bf1b25..cd7c992f63fc 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -10,7 +10,7 @@ def sample_batch_translation(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchTranslationInput, + BatchDocumentInput, StorageTarget ) @@ -23,7 +23,7 @@ def sample_batch_translation(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchTranslationInput( + BatchDocumentInput( source_url=source_container_url, source_language="en", targets=[ @@ -40,29 +40,29 @@ def sample_batch_translation(): ) ] - batch_detail = client.create_batch(batch) # type: BatchStatusDetail + job_detail = client.create_translation_job(batch) # type: JobStatusDetail - print("Batch initial status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) - batch_result = client.wait_until_done(batch_detail.id) # type: BatchStatusDetail - if batch_result.status == "Succeeded": + job_result = client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": print("We translated our documents!") - if batch_result.documents_failed_count > 0: - 
check_documents(client, batch_result.id) + if job_result.documents_failed_count > 0: + check_documents(client, job_result.id) - if batch_result.status in ["Failed", "ValidationFailed"]: - if batch_result.error: - print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) - check_documents(client, batch_result.id) + if job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(client, job_result.id) exit(1) -def check_documents(client, batch_id): +def check_documents(client, job_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py similarity index 66% rename from sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py rename to sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py index cd97ebc878d3..006d17711bee 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_batch.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py @@ -10,7 +10,7 @@ def sample_cancel_batch(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchTranslationInput, + BatchDocumentInput, StorageTarget ) @@ -22,7 +22,7 @@ def sample_cancel_batch(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchTranslationInput( + BatchDocumentInput( source_url=source_container_url, source_language="en", targets=[ @@ -35,13 +35,13 @@ def sample_cancel_batch(): ) ] - batch_detail = client.create_batch(batch) # type: BatchStatusDetail + job_detail = client.create_translation_job(batch) # type: JobStatusDetail - print("Batch status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) - client.cancel_batch(batch_detail.id) - detail = client.get_batch_status(batch_detail.id) # type: BatchStatusDetail + client.cancel_job(job_detail.id) + job_detail = client.get_job_status(job_detail.id) # type: JobStatusDetail - if detail.status in ["Cancelled", "Cancelling"]: - print("We cancelled batch with ID: {}".format(detail.id)) + if job_detail.status in ["Cancelled", "Cancelling"]: + print("We cancelled job with ID: {}".format(job_detail.id)) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py index c3d97b209953..4d47f3ca8821 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -11,7 +11,7 @@ def sample_batch_translation(): 
from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchTranslationInput, + BatchDocumentInput, StorageTarget ) @@ -24,7 +24,7 @@ def sample_batch_translation(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchTranslationInput( + BatchDocumentInput( source_url=source_container_url, source_language="en", targets=[ @@ -42,32 +42,32 @@ def sample_batch_translation(): ) ] - batch_detail = client.create_batch(batch) + job_detail = client.create_translation_job(batch) while True: - batch_detail = client.get_batch_status(batch_detail.id) # type: BatchStatusDetail - if batch_detail.status in ["NotStarted", "Running"]: - time.sleep(5) + job_detail = client.get_job_status(job_detail.id) # type: JobStatusDetail + if job_detail.status in ["NotStarted", "Running"]: + time.sleep(10) continue - if batch_detail.status in ["Failed", "ValidationFailed"]: - if batch_detail.error: - print("Batch failed: {}: {}".format(batch_detail.error.code, batch_detail.error.message)) - check_documents(client, batch_detail.id) + if job_detail.status in ["Failed", "ValidationFailed"]: + if job_detail.error: + print("Translation job failed: {}: {}".format(job_detail.error.code, job_detail.error.message)) + check_documents(client, job_detail.id) exit(1) - if batch_detail.status == "Succeeded": + if job_detail.status == "Succeeded": print("We translated our documents!") - if batch_detail.documents_failed_count > 0: - check_documents(client, batch_detail.id) + if job_detail.documents_failed_count > 0: + check_documents(client, job_detail.id) break -def check_documents(client, batch_id): +def check_documents(client, job_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py index 745a9f46a548..6cec8ed9ab04 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -10,7 +10,7 @@ def sample_custom_translation(): from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( DocumentTranslationClient, - BatchTranslationInput, + BatchDocumentInput, StorageTarget ) @@ -23,7 +23,7 @@ def sample_custom_translation(): client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) batch = [ - BatchTranslationInput( + BatchDocumentInput( source_url=source_container_url, source_language="en", targets=[ @@ -37,29 +37,29 @@ def sample_custom_translation(): ) ] - batch_detail = client.create_batch(batch) # type: BatchStatusDetail + job_detail = client.create_translation_job(batch) # type: JobStatusDetail - print("Batch initial status: {}".format(batch_detail.status)) - print("Number of translations on documents: {}".format(batch_detail.documents_total_count)) + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) - batch_result = 
client.wait_until_done(batch_detail.id) # type: BatchStatusDetail - if batch_result.status == "Succeeded": + job_result = client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": print("We translated our documents!") - if batch_result.documents_failed_count > 0: - check_documents(client, batch_result.id) + if job_result.documents_failed_count > 0: + check_documents(client, job_result.id) - if batch_result.status in ["Failed", "ValidationFailed"]: - if batch_result.error: - print("Batch failed: {}: {}".format(batch_result.error.code, batch_result.error.message)) - check_documents(client, batch_result.id) + if job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(client, job_result.id) exit(1) -def check_documents(client, batch_id): +def check_documents(client, job_id): from azure.core.exceptions import ResourceNotFoundError try: - doc_statuses = client.list_documents_statuses(batch_id) # type: ItemPaged[DocumentStatusDetail] + doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: print("Failed to process any documents in source/target container.") raise err diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py index 0abb8a57a468..cc2495de8e9e 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -5,7 +5,7 @@ # ------------------------------------ -def sample_list_all_batches(): +def sample_list_all_jobs(): import os from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( @@ -16,19 +16,18 @@ def sample_list_all_batches(): key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) - batches = client.list_batches_statuses() + jobs = client.list_submitted_jobs() - print("Batches summary") - for batch in batches: - print("Batch ID: {}".format(batch.id)) - print("Batch status: {}".format(batch.status)) - print("Batch created on: {}".format(batch.created_on)) - print("Batch last updated on: {}".format(batch.last_updated_on)) - print("Batch number of translations on documents: {}".format(batch.documents_total_count)) + for job in jobs: + print("Job ID: {}".format(job.id)) + print("Job status: {}".format(job.status)) + print("Job created on: {}".format(job.created_on)) + print("Job last updated on: {}".format(job.last_updated_on)) + print("Total number of translations on documents: {}".format(job.documents_total_count)) print("Of total documents...") - print("{} failed".format(batch.documents_failed_count)) - print("{} succeeded".format(batch.documents_succeeded_count)) - print("{} in progress".format(batch.documents_in_progress_count)) - print("{} not yet started".format(batch.documents_not_yet_started_count)) - print("{} cancelled".format(batch.documents_cancelled_count)) + print("{} failed".format(job.documents_failed_count)) + print("{} succeeded".format(job.documents_succeeded_count)) + print("{} in progress".format(job.documents_in_progress_count)) + print("{} not yet started".format(job.documents_not_yet_started_count)) + print("{} 
cancelled".format(job.documents_cancelled_count)) From b972486d549a66ae6b33d3f95486687c1b1ff45e Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Tue, 2 Mar 2021 07:58:52 -0800 Subject: [PATCH 6/8] update samples - optional src language --- .../azure/ai/documenttranslation/_models.py | 9 +++--- .../samples/sample_azure_storage.py | 7 +++-- .../samples/sample_batch_translation.py | 30 ++++++++++++++----- .../samples/sample_cancel_translation_job.py | 7 +++-- .../samples/sample_check_statuses.py | 15 ++++++---- .../samples/sample_custom_translation.py | 20 ++++++++----- .../samples/sample_list_batches.py | 10 ++++++- 7 files changed, 67 insertions(+), 31 deletions(-) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py index 00b0f0d9a137..e5928093c103 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/azure/ai/documenttranslation/_models.py @@ -67,10 +67,10 @@ class BatchDocumentInput(object): :param source_url: Required. Location of the folder / container or single file with your documents. :type source_url: str - :param str source_language: Language code - If none is specified, we will perform auto detect on the document. :param targets: Required. Location of the destination for the output. :type targets: list[StorageTarget] + :keyword str source_language: Language code + If none is specified, we will perform auto detect on the document. :keyword str prefix: A case-sensitive prefix string to filter documents in the source path for translation. For example, when using a Azure storage blob Uri, use the prefix to restrict sub folders for translation. 
@@ -85,14 +85,13 @@ class BatchDocumentInput(object): def __init__( self, source_url, - source_language, targets, **kwargs ): - # type: (str, str, List[StorageTarget], **Any) -> None + # type: (str, List[StorageTarget], **Any) -> None self.source_url = source_url - self.source_language = source_language self.targets = targets + self.source_language = kwargs.get("source_language", None) self.storage_type = kwargs.get("storage_type", None) self.storage_source = kwargs.get("storage_source", None) self.prefix = kwargs.get("prefix", None) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py index 6501fa7b4d06..dcbb9ec80298 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py @@ -59,7 +59,6 @@ def batch_translation_with_storage(): batch = [ BatchDocumentInput( source_url=source_container_url, - source_language="en", targets=[ StorageTarget( target_url=target_container_url, @@ -78,7 +77,7 @@ def batch_translation_with_storage(): if job_result.documents_failed_count > 0: check_documents(translation_client, job_result.id) - if job_result.status in ["Failed", "ValidationFailed"]: + elif job_result.status in ["Failed", "ValidationFailed"]: if job_result.error: print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) check_documents(translation_client, job_result.id) @@ -117,3 +116,7 @@ def check_documents(client, job_id): )) if document.url not in docs_to_retry: docs_to_retry.append(document.url) + + +if __name__ == '__main__': + batch_translation_with_storage() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py index cd7c992f63fc..14bdd62b2d5b 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation.py @@ -16,7 +16,8 @@ def sample_batch_translation(): endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] - source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + source_container_url_en = os.environ["AZURE_SOURCE_CONTAINER_URL_EN"] + source_container_url_de = os.environ["AZURE_SOURCE_CONTAINER_URL_DE"] target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] @@ -24,8 +25,7 @@ def sample_batch_translation(): batch = [ BatchDocumentInput( - source_url=source_container_url, - source_language="en", + source_url=source_container_url_en, targets=[ StorageTarget( target_url=target_container_url_es, @@ -35,8 +35,20 @@ def sample_batch_translation(): target_url=target_container_url_fr, language="fr" ) - ], - storage_type="file" + ] + ), + BatchDocumentInput( + source_url=source_container_url_de, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] ) ] @@ -51,7 +63,7 @@ def sample_batch_translation(): if job_result.documents_failed_count > 0: check_documents(client, job_result.id) - if job_result.status in ["Failed", "ValidationFailed"]: + elif job_result.status in ["Failed", 
"ValidationFailed"]: if job_result.error: print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) check_documents(client, job_result.id) @@ -64,7 +76,7 @@ def check_documents(client, job_id): try: doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: - print("Failed to process any documents in source/target container.") + print("Failed to process any documents in source/target container due to insufficient permissions.") raise err docs_to_retry = [] @@ -78,3 +90,7 @@ def check_documents(client, job_id): )) if document.url not in docs_to_retry: docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_batch_translation() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py index 006d17711bee..619ada01740d 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py @@ -5,7 +5,7 @@ # ------------------------------------ -def sample_cancel_batch(): +def sample_cancel_job(): import os from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( @@ -24,7 +24,6 @@ def sample_cancel_batch(): batch = [ BatchDocumentInput( source_url=source_container_url, - source_language="en", targets=[ StorageTarget( target_url=target_container_url_es, @@ -45,3 +44,7 @@ def sample_cancel_batch(): if job_detail.status in ["Cancelled", "Cancelling"]: print("We cancelled job with ID: {}".format(job_detail.id)) + + +if __name__ == '__main__': + sample_cancel_job() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py index 4d47f3ca8821..596b373db915 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py @@ -5,7 +5,7 @@ # ------------------------------------ -def sample_batch_translation(): +def sample_translation_status_checks(): import os import time from azure.core.credentials import AzureKeyCredential @@ -26,7 +26,6 @@ def sample_batch_translation(): batch = [ BatchDocumentInput( source_url=source_container_url, - source_language="en", targets=[ StorageTarget( target_url=target_container_url_es, @@ -47,16 +46,16 @@ def sample_batch_translation(): while True: job_detail = client.get_job_status(job_detail.id) # type: JobStatusDetail if job_detail.status in ["NotStarted", "Running"]: - time.sleep(10) + time.sleep(30) continue - if job_detail.status in ["Failed", "ValidationFailed"]: + elif job_detail.status in ["Failed", "ValidationFailed"]: if job_detail.error: print("Translation job failed: {}: {}".format(job_detail.error.code, job_detail.error.message)) check_documents(client, job_detail.id) exit(1) - if job_detail.status == "Succeeded": + elif job_detail.status == "Succeeded": print("We translated our documents!") if job_detail.documents_failed_count > 0: check_documents(client, job_detail.id) @@ -69,7 +68,7 @@ def check_documents(client, job_id): try: doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: - 
print("Failed to process any documents in source/target container.") + print("Failed to process any documents in source/target container due to insufficient permissions.") raise err docs_to_retry = [] @@ -83,3 +82,7 @@ def check_documents(client, job_id): )) if document.url not in docs_to_retry: docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_translation_status_checks() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py index 6cec8ed9ab04..30321928eaca 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_custom_translation.py @@ -25,12 +25,12 @@ def sample_custom_translation(): batch = [ BatchDocumentInput( source_url=source_container_url, - source_language="en", targets=[ StorageTarget( target_url=target_container_url_fr, language="fr", - category_id=category_id + category_id=category_id, + glossaries=["https://exampleglossary"] ) ], prefix="document_2021" @@ -48,11 +48,11 @@ def sample_custom_translation(): if job_result.documents_failed_count > 0: check_documents(client, job_result.id) - if job_result.status in ["Failed", "ValidationFailed"]: - if job_result.error: - print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) - check_documents(client, job_result.id) - exit(1) + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + check_documents(client, job_result.id) + exit(1) def check_documents(client, job_id): @@ -61,7 +61,7 @@ def check_documents(client, job_id): try: doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: - print("Failed to process any documents in source/target container.") + print("Failed to process any documents in source/target container due to insufficient permissions.") raise err docs_to_retry = [] @@ -75,3 +75,7 @@ def check_documents(client, job_id): )) if document.url not in docs_to_retry: docs_to_retry.append(document.url) + + +if __name__ == '__main__': + sample_custom_translation() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py index cc2495de8e9e..2961b9516301 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py @@ -16,14 +16,18 @@ def sample_list_all_jobs(): key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) - jobs = client.list_submitted_jobs() + jobs = client.list_submitted_jobs() # type: ItemPaged[JobStatusDetail] for job in jobs: + if job.status in ["NotStarted", "Running"]: + job = client.wait_until_done(job.id) + print("Job ID: {}".format(job.id)) print("Job status: {}".format(job.status)) print("Job created on: {}".format(job.created_on)) print("Job last updated on: {}".format(job.last_updated_on)) print("Total number of translations on documents: {}".format(job.documents_total_count)) + print("Total number of characters charged: {}".format(job.total_characters_charged)) print("Of total 
documents...") print("{} failed".format(job.documents_failed_count)) @@ -31,3 +35,7 @@ def sample_list_all_jobs(): print("{} in progress".format(job.documents_in_progress_count)) print("{} not yet started".format(job.documents_not_yet_started_count)) print("{} cancelled".format(job.documents_cancelled_count)) + + +if __name__ == '__main__': + sample_list_all_jobs() From 28aabfbc65d705eb7f61c24457e585de130c50a2 Mon Sep 17 00:00:00 2001 From: Mohamed Shaban Date: Wed, 3 Mar 2021 11:42:30 -0500 Subject: [PATCH 7/8] samples hero scenarios (#16936) * [samples] added 'batch_translation_async' sample * [samples] added 'batch_translation_with_storage_async' sample * [samples] added remianing async samples * [samples] update file names * [samples] added self to instance methods * [samples][async] fix import textanalytics :) * [samples] fix self. when calling instance methods * [samples] fixed async check status to use AsyncItemPaged used in Async Client * [samples] async -> some async operations instead of sync ones * [samples][async] use async blob operations * [samples][async] blob download async * [samples][async] check_documents async * [samples][async] added some missing await methods * [async samples] change await time to recommended period * [samples] updated async samples to comply with new changes --- .../sample_batch_translation_async.py | 113 ++++++++++++++ ...le_batch_translation_with_storage_async.py | 142 ++++++++++++++++++ .../sample_cancel_translation_job_async.py | 67 +++++++++ .../sample_custom_translation_async.py | 96 ++++++++++++ .../sample_list_all_submitted_jobs_async.py | 54 +++++++ .../sample_translation_status_checks_async.py | 103 +++++++++++++ ... sample_batch_translation_with_storage.py} | 6 +- .../samples/sample_cancel_translation_job.py | 4 +- ...s.py => sample_list_all_submitted_jobs.py} | 4 +- ...py => sample_translation_status_checks.py} | 0 10 files changed, 582 insertions(+), 7 deletions(-) create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py create mode 100644 sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py rename sdk/documenttranslation/azure-ai-documenttranslation/samples/{sample_azure_storage.py => sample_batch_translation_with_storage.py} (97%) rename sdk/documenttranslation/azure-ai-documenttranslation/samples/{sample_list_batches.py => sample_list_all_submitted_jobs.py} (95%) rename sdk/documenttranslation/azure-ai-documenttranslation/samples/{sample_check_statuses.py => sample_translation_status_checks.py} (100%) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py new file mode 100644 index 000000000000..5e9897646319 --- /dev/null +++ 
b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_async.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +import os +import asyncio + + +class BatchTranslationSampleAsync(object): + + async def batch_translation_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url_en = os.environ["AZURE_SOURCE_CONTAINER_URL_EN"] + source_container_url_de = os.environ["AZURE_SOURCE_CONTAINER_URL_DE"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + # create service client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url_en, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ), + BatchDocumentInput( + source_url=source_container_url_de, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ] + ) + ] + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) # type: JobStatusDetail + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + # get job result + job_result = await client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(client, job_result.id) + exit(1) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = BatchTranslationSampleAsync() + await sample.batch_translation_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) diff --git 
a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py new file mode 100644 index 000000000000..fa8af0255101 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_batch_translation_with_storage_async.py @@ -0,0 +1,142 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ + +import os +import asyncio + + +class BatchTranslationWithStorageSampleAsync(object): + + async def batch_translation_with_storage_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + from azure.storage.blob.aio import ContainerClient + from azure.storage.blob import ( + generate_container_sas, + ContainerSasPermissions + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_storage_endpoint = os.environ["AZURE_STORAGE_SOURCE_ENDPOINT"] + source_storage_account_name = os.environ["AZURE_STORAGE_SOURCE_ACCOUNT_NAME"] + source_storage_container_name = os.environ["AZURE_STORAGE_SOURCE_CONTAINER_NAME"] + source_storage_key = os.environ["AZURE_STORAGE_SOURCE_KEY"] + target_storage_endpoint = os.environ["AZURE_STORAGE_TARGET_ENDPOINT"] + target_storage_account_name = os.environ["AZURE_STORAGE_TARGET_ACCOUNT_NAME"] + target_storage_container_name = os.environ["AZURE_STORAGE_TARGET_CONTAINER_NAME"] + target_storage_key = os.environ["AZURE_STORAGE_TARGET_KEY"] + + # create service clients + translation_client = DocumentTranslationClient( + endpoint, AzureKeyCredential(key) + ) + + container_client = ContainerClient( + source_storage_endpoint, + container_name=source_storage_container_name, + credential=source_storage_key + ) + + # upload some document for translation + with open("document.txt", "rb") as doc: + await container_client.upload_blob(name="document.txt", data=doc) + + # prepare translation job input + source_container_sas = generate_container_sas( + account_name=source_storage_account_name, + container_name=source_storage_container_name, + account_key=source_storage_key, + permission=ContainerSasPermissions.from_string("rl") + ) + + target_container_sas = generate_container_sas( + account_name=target_storage_account_name, + container_name=target_storage_container_name, + account_key=target_storage_key, + permission=ContainerSasPermissions.from_string("rlwd") + ) + + source_container_url = source_storage_endpoint + "/" + source_storage_container_name + "?" + source_container_sas + target_container_url = target_storage_endpoint + "/" + target_storage_container_name + "?" 
+ target_container_sas + + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url, + language="es" + ) + ], + prefix="document" + ) + ] + + # run job + async with translation_client: + job_detail = await translation_client.create_translation_job(batch) + job_result = await translation_client.wait_until_done(job_detail.id) + + # poll status result + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(translation_client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(translation_client, job_result.id) + exit(1) + + # store result documents + container_client = ContainerClient( + target_storage_endpoint, + container_name=target_storage_container_name, + credential=target_storage_key + ) + + with open("translated.txt", "wb") as my_blob: + download_stream = await container_client.download_blob("document.txt") + my_blob.write(await download_stream.readall()) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + +async def main(): + sample = BatchTranslationWithStorageSampleAsync() + await sample.batch_translation_with_storage_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py new file mode 100644 index 000000000000..2cb61c61e33d --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_cancel_translation_job_async.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +import os +import asyncio + +class CancelTranslationJobSampleAsync(object): + + async def cancel_translation_job_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ) + ], + storage_type="file" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run job + async with client: + job_detail = await client.create_translation_job(batch) + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + await client.cancel_job(job_detail.id) + job_detail = await client.get_job_status(job_detail.id) # type: JobStatusDetail + + if job_detail.status in ["Cancelled", "Cancelling"]: + print("We cancelled job with ID: {}".format(job_detail.id)) + + +async def main(): + sample = CancelTranslationJobSampleAsync() + await sample.cancel_translation_job_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) + + diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py new file mode 100644 index 000000000000..6d0ec8d2d815 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_custom_translation_async.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------ + +import os +import asyncio + +class CustomTranslationSampleAsync(object): + + async def custom_translation_async(self): + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + category_id = os.environ["AZURE_DOCUMENT_TRANSLATION_MODEL_ID"] + + # prepare translation job input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_fr, + language="fr", + category_id=category_id, + glossaries=["https://exampleglossary"] + ) + ], + prefix="document_2021" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) + + print("Job initial status: {}".format(job_detail.status)) + print("Number of translations on documents: {}".format(job_detail.documents_total_count)) + + job_result = await client.wait_until_done(job_detail.id) # type: JobStatusDetail + if job_result.status == "Succeeded": + print("We translated our documents!") + if job_result.documents_failed_count > 0: + await self.check_documents(client, job_result.id) + + elif job_result.status in ["Failed", "ValidationFailed"]: + if job_result.error: + print("Translation job failed: {}: {}".format(job_result.error.code, job_result.error.message)) + await self.check_documents(client, job_result.id) + exit(1) + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = CustomTranslationSampleAsync() + await sample.custom_translation_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py new file mode 100644 index 000000000000..ecb983cfe869 --- /dev/null +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_list_all_submitted_jobs_async.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+# ------------------------------------
+
+import os
+import asyncio
+
+class ListAllSubmittedJobsSampleAsync(object):
+
+    async def list_all_submitted_jobs(self):
+        # import libraries
+        from azure.core.credentials import AzureKeyCredential
+        from azure.ai.documenttranslation.aio import DocumentTranslationClient
+
+        # get service secrets
+        endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"]
+        key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"]
+
+        # create translation client
+        client = DocumentTranslationClient(endpoint, AzureKeyCredential(key))
+
+        # list submitted jobs
+        jobs = client.list_submitted_jobs()  # type: AsyncItemPaged[JobStatusDetail]
+
+        async for job in jobs:
+            # wait for job to finish
+            if job.status in ["NotStarted", "Running"]:
+                job = await client.wait_until_done(job.id)
+
+            print("Job ID: {}".format(job.id))
+            print("Job status: {}".format(job.status))
+            print("Job created on: {}".format(job.created_on))
+            print("Job last updated on: {}".format(job.last_updated_on))
+            print("Total number of translations on documents: {}".format(job.documents_total_count))
+            print("Total number of characters charged: {}".format(job.total_characters_charged))
+
+            print("Of total documents...")
+            print("{} failed".format(job.documents_failed_count))
+            print("{} succeeded".format(job.documents_succeeded_count))
+            print("{} in progress".format(job.documents_in_progress_count))
+            print("{} not yet started".format(job.documents_not_yet_started_count))
+            print("{} cancelled".format(job.documents_cancelled_count))
+
+
+async def main():
+    sample = ListAllSubmittedJobsSampleAsync()
+    await sample.list_all_submitted_jobs()
+
+
+if __name__ == '__main__':
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(main())
\ No newline at end of file
diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py
new file mode 100644
index 000000000000..3e0dcef5b8bb
--- /dev/null
+++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/async_samples/sample_translation_status_checks_async.py
@@ -0,0 +1,103 @@
+# coding=utf-8
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------ + +import os +import asyncio +import time + +class TranslationStatusChecksSampleAsync(object): + + async def translation_status_checks_async(self): + + # import libraries + from azure.core.credentials import AzureKeyCredential + from azure.ai.documenttranslation.aio import DocumentTranslationClient + from azure.ai.documenttranslation import ( + BatchDocumentInput, + StorageTarget + ) + + # get service secrets + endpoint = os.environ["AZURE_DOCUMENT_TRANSLATION_ENDPOINT"] + key = os.environ["AZURE_DOCUMENT_TRANSLATION_KEY"] + source_container_url = os.environ["AZURE_SOURCE_CONTAINER_URL"] + target_container_url_es = os.environ["AZURE_TARGET_CONTAINER_URL_ES"] + target_container_url_fr = os.environ["AZURE_TARGET_CONTAINER_URL_FR"] + + # prepare translation input + batch = [ + BatchDocumentInput( + source_url=source_container_url, + targets=[ + StorageTarget( + target_url=target_container_url_es, + language="es" + ), + StorageTarget( + target_url=target_container_url_fr, + language="fr" + ) + ], + storage_type="folder", + prefix="document_2021" + ) + ] + + # create translation client + client = DocumentTranslationClient(endpoint, AzureKeyCredential(key)) + + # run translation job + async with client: + job_detail = await client.create_translation_job(batch) + while True: + job_detail = await client.get_job_status(job_detail.id) # type: JobStatusDetail + if job_detail.status in ["NotStarted", "Running"]: + await asyncio.sleep(30) + continue + + elif job_detail.status in ["Failed", "ValidationFailed"]: + if job_detail.error: + print("Translation job failed: {}: {}".format(job_detail.error.code, job_detail.error.message)) + await self.check_documents(client, job_detail.id) + exit(1) + + elif job_detail.status == "Succeeded": + print("We translated our documents!") + if job_detail.documents_failed_count > 0: + await self.check_documents(client, job_detail.id) + break + + + async def check_documents(self, client, job_id): + from azure.core.exceptions import ResourceNotFoundError + + try: + doc_statuses = client.list_documents_statuses(job_id) # type: AsyncItemPaged[DocumentStatusDetail] + except ResourceNotFoundError as err: + print("Failed to process any documents in source/target container due to insufficient permissions.") + raise err + + docs_to_retry = [] + async for document in doc_statuses: + if document.status == "Failed": + print("Document at {} failed to be translated to {} language".format( + document.url, document.translate_to + )) + print("Document ID: {}, Error Code: {}, Message: {}".format( + document.id, document.error.code, document.error.message + )) + if document.url not in docs_to_retry: + docs_to_retry.append(document.url) + + +async def main(): + sample = TranslationStatusChecksSampleAsync() + await sample.translation_status_checks_async() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(main()) \ No newline at end of file diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py similarity index 97% rename from sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py rename to sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py index dcbb9ec80298..d0968c411378 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_azure_storage.py +++ 
b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_batch_translation_with_storage.py @@ -5,7 +5,7 @@ # ------------------------------------ -def batch_translation_with_storage(): +def sample_batch_translation_with_storage(): import os from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( @@ -102,7 +102,7 @@ def check_documents(client, job_id): try: doc_statuses = client.list_documents_statuses(job_id) # type: ItemPaged[DocumentStatusDetail] except ResourceNotFoundError as err: - print("Failed to process any documents in source/target container.") + print("Failed to process any documents in source/target container due to insufficient permissions.") raise err docs_to_retry = [] @@ -119,4 +119,4 @@ def check_documents(client, job_id): if __name__ == '__main__': - batch_translation_with_storage() + sample_batch_translation_with_storage() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py index 619ada01740d..4ed96a711b31 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_cancel_translation_job.py @@ -5,7 +5,7 @@ # ------------------------------------ -def sample_cancel_job(): +def sample_cancel_translation_job(): import os from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( @@ -47,4 +47,4 @@ def sample_cancel_job(): if __name__ == '__main__': - sample_cancel_job() + sample_cancel_translation_job() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py similarity index 95% rename from sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py rename to sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py index 2961b9516301..d80d3de8a52d 100644 --- a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_batches.py +++ b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_list_all_submitted_jobs.py @@ -5,7 +5,7 @@ # ------------------------------------ -def sample_list_all_jobs(): +def sample_list_all_submitted_jobs(): import os from azure.core.credentials import AzureKeyCredential from azure.ai.documenttranslation import ( @@ -38,4 +38,4 @@ def sample_list_all_jobs(): if __name__ == '__main__': - sample_list_all_jobs() + sample_list_all_submitted_jobs() diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py b/sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_translation_status_checks.py similarity index 100% rename from sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_check_statuses.py rename to sdk/documenttranslation/azure-ai-documenttranslation/samples/sample_translation_status_checks.py From 4501d169fc2bd9695119ad5955b38942f604ad97 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Wed, 3 Mar 2021 08:52:11 -0800 Subject: [PATCH 8/8] remove 3.5 support --- sdk/documenttranslation/azure-ai-documenttranslation/setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sdk/documenttranslation/azure-ai-documenttranslation/setup.py b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py index 
2b8dc240462f..d57b8bf7055b 100644
--- a/sdk/documenttranslation/azure-ai-documenttranslation/setup.py
+++ b/sdk/documenttranslation/azure-ai-documenttranslation/setup.py
@@ -41,7 +41,6 @@
         'Programming Language :: Python :: 2',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
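
A note on the final patch: removing the 'Programming Language :: Python :: 3.5' classifier only changes the metadata PyPI displays; on its own it does not stop a Python 3.5 interpreter from installing the package. A change like this is typically paired with an update to the python_requires constraint in setup.py, which the hunk above does not show. The snippet below is therefore only a hedged sketch of what such a companion change usually looks like: the package name comes from this patch series, while the version and the exact python_requires marker string are placeholders, not values taken from the diff.

# sketch_setup.py: illustrative only, not the actual setup.py from this patch
from setuptools import setup, find_packages

setup(
    name="azure-ai-documenttranslation",   # package name from this patch series
    version="0.0.0b1",                     # placeholder version (assumption)
    packages=find_packages(exclude=["tests", "samples"]),
    # Excluding 3.5 here is what actually prevents installs on Python 3.5;
    # the exact marker string is an assumption, not copied from the diff.
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*",
    classifiers=[
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
)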