[text analytics] Regenerate with v3.0 #11027

Closed
----
@@ -8,3 +8,9 @@

from ._text_analytics_client import TextAnalyticsClient
__all__ = ['TextAnalyticsClient']

try:
from ._patch import patch_sdk
patch_sdk()
except ImportError:
pass
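
The new try/except gives the generated package a regeneration-safe extension point: if a hand-written `_patch.py` module sits next to this `__init__.py`, its `patch_sdk()` runs at import time; otherwise the ImportError is swallowed. A minimal sketch of what such a hook might contain — the module body below is hypothetical, not part of this PR:

```python
# _patch.py -- hypothetical companion module for the hook added above.
# Anything applied here survives the next autorest regeneration.

def patch_sdk():
    """Apply hand-written customizations to the generated client at import time."""
    from ._text_analytics_client import TextAnalyticsClient

    # Illustrative tweak only: a friendlier repr for debugging.
    def _repr(self):
        return "<TextAnalyticsClient endpoint={!r}>".format(self._config.endpoint)

    TextAnalyticsClient.__repr__ = _repr
```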
----
@@ -6,11 +6,17 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any
from typing import TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies

if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any

from azure.core.credentials import TokenCredential

VERSION = "unknown"

class TextAnalyticsClientConfiguration(Configuration):
@@ -20,7 +26,7 @@ class TextAnalyticsClientConfiguration(Configuration):
attributes.

:param credential: Credential needed for the client to connect to Azure.
:type credential: azure.core.credentials.TokenCredential
:type credential: ~azure.core.credentials.TokenCredential
:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com).
:type endpoint: str
"""
@@ -41,6 +47,7 @@ def __init__(
self.credential = credential
self.endpoint = endpoint
self.credential_scopes = ['https://cognitiveservices.azure.com/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'ai-textanalytics/{}'.format(VERSION))
self._configure(**kwargs)
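
The appended `extend` line changes scope handling: a `credential_scopes` keyword passed by the caller is now consumed here and added on top of the default Cognitive Services scope (previously this constructor did not pop the keyword). A usage sketch, with a placeholder endpoint and an illustrative extra scope:

```python
from azure.identity import DefaultAzureCredential

# Placeholder values throughout; the point is that the default scope is
# kept and the caller's scope is appended rather than replacing it.
config = TextAnalyticsClientConfiguration(
    DefaultAzureCredential(),
    "https://westus.api.cognitive.microsoft.com",
    credential_scopes=["https://cognitiveservices.azure.us/.default"],
)
print(config.credential_scopes)
# ['https://cognitiveservices.azure.com/.default',
#  'https://cognitiveservices.azure.us/.default']
```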

----
@@ -6,11 +6,15 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any
from typing import TYPE_CHECKING

from azure.core import PipelineClient
from msrest import Deserializer, Serializer

if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any

from ._configuration import TextAnalyticsClientConfiguration
from .operations import TextAnalyticsClientOperationsMixin
from . import models
@@ -20,9 +24,10 @@ class TextAnalyticsClient(TextAnalyticsClientOperationsMixin):
"""The Text Analytics API is a suite of text analytics web services built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction and language detection. No training data is needed to use this API; just bring your text data. This API uses advanced natural language processing techniques to deliver best in class predictions. Further documentation can be found in https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview.

:param credential: Credential needed for the client to connect to Azure.
:type credential: azure.core.credentials.TokenCredential
:type credential: ~azure.core.credentials.TokenCredential
:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com).
:type endpoint: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""

def __init__(
@@ -32,7 +37,7 @@ def __init__(
**kwargs # type: Any
):
# type: (...) -> None
base_url = '{Endpoint}/text/analytics/v3.0-preview.1'
base_url = '{Endpoint}/text/analytics/v3.0'
self._config = TextAnalyticsClientConfiguration(credential, endpoint, **kwargs)
self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
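
Beyond the docstring edits, the substantive change in the sync client is the base URL moving from `v3.0-preview.1` to `v3.0`; construction is unchanged from the caller's side. A sketch with a placeholder endpoint:

```python
from azure.identity import DefaultAzureCredential

# Requests are now routed to {Endpoint}/text/analytics/v3.0 instead of
# the old v3.0-preview.1 path; no caller-side changes are needed.
client = TextAnalyticsClient(
    credential=DefaultAzureCredential(),
    endpoint="https://westus.api.cognitive.microsoft.com",
)
```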

----
@@ -6,11 +6,15 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any
from typing import Any, TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies

if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential

VERSION = "unknown"

class TextAnalyticsClientConfiguration(Configuration):
@@ -20,7 +24,7 @@ class TextAnalyticsClientConfiguration(Configuration):
attributes.

:param credential: Credential needed for the client to connect to Azure.
:type credential: azure.core.credentials.AsyncTokenCredential
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com).
:type endpoint: str
"""
@@ -40,6 +44,7 @@ def __init__(
self.credential = credential
self.endpoint = endpoint
self.credential_scopes = ['https://cognitiveservices.azure.com/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'ai-textanalytics/{}'.format(VERSION))
self._configure(**kwargs)

----
@@ -20,9 +20,10 @@ class TextAnalyticsClient(TextAnalyticsClientOperationsMixin):
"""The Text Analytics API is a suite of text analytics web services built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction and language detection. No training data is needed to use this API; just bring your text data. This API uses advanced natural language processing techniques to deliver best in class predictions. Further documentation can be found in https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview.

:param credential: Credential needed for the client to connect to Azure.
:type credential: azure.core.credentials.AsyncTokenCredential
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com).
:type endpoint: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""

def __init__(
@@ -31,7 +32,7 @@
endpoint: str,
**kwargs: Any
) -> None:
base_url = '{Endpoint}/text/analytics/v3.0-preview.1'
base_url = '{Endpoint}/text/analytics/v3.0'
self._config = TextAnalyticsClientConfiguration(credential, endpoint, **kwargs)
self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)

----
@@ -21,7 +21,7 @@ class TextAnalyticsClientOperationsMixin:

async def entities_recognition_general(
self,
documents: List["MultiLanguageInput"],
documents: List["models.MultiLanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
@@ -44,13 +44,14 @@ async def entities_recognition_general(
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.EntitiesResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))

_input = models.MultiLanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.entities_recognition_general.metadata['url']
url = self.entities_recognition_general.metadata['url'] # type: ignore
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
@@ -88,82 +89,11 @@ async def entities_recognition_general(
return cls(pipeline_response, deserialized, {})

return deserialized
entities_recognition_general.metadata = {'url': '/entities/recognition/general'}

async def entities_recognition_pii(
self,
documents: List["MultiLanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
) -> "models.EntitiesResult":
"""The API returns a list of entities with personal information (\"SSN\", \"Bank Account\" etc) in the document. For the list of supported entity types, check :code:`<a href="https://aka.ms/tanerpii">Supported Entity Types in Text Analytics API</a>`. See the :code:`<a href="https://aka.ms/talangs">Supported languages in Text Analytics API</a>` for the list of enabled languages.

Entities containing personal information.

:param documents: The set of documents to process as part of this batch.
:type documents: list[~azure.ai.textanalytics.models.MultiLanguageInput]
:param model_version: (Optional) This value indicates which model will be used for scoring. If
a model-version is not specified, the API should default to the latest, non-preview version.
:type model_version: str
:param show_stats: (Optional) if set to true, response will contain input and document level
statistics.
:type show_stats: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EntitiesResult or the result of cls(response)
:rtype: ~azure.ai.textanalytics.models.EntitiesResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.EntitiesResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})

_input = models.MultiLanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.entities_recognition_pii.metadata['url']
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)

# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if model_version is not None:
query_parameters['model-version'] = self._serialize.query("model_version", model_version, 'str')
if show_stats is not None:
query_parameters['showStats'] = self._serialize.query("show_stats", show_stats, 'bool')

# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'

# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_input, 'MultiLanguageBatchInput')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)

pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response

if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.TextAnalyticsError, response)
raise HttpResponseError(response=response, model=error)

deserialized = self._deserialize('EntitiesResult', pipeline_response)

if cls:
return cls(pipeline_response, deserialized, {})

return deserialized
entities_recognition_pii.metadata = {'url': '/entities/recognition/pii'}
entities_recognition_general.metadata = {'url': '/entities/recognition/general'} # type: ignore
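
For context, a hedged end-to-end sketch of the regenerated async operation. The import paths are assumptions (this PR shows only the generated internals), the endpoint is a placeholder, and the result attributes follow the `EntitiesResult`/`DocumentEntities` models imported elsewhere in this diff:

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.textanalytics.aio import TextAnalyticsClient  # path assumed
from azure.ai.textanalytics.models import MultiLanguageInput

async def main():
    client = TextAnalyticsClient(
        credential=DefaultAzureCredential(),
        endpoint="https://westus.api.cognitive.microsoft.com",  # placeholder
    )
    docs = [MultiLanguageInput(id="1", language="en",
                               text="Satya Nadella spoke at Build.")]
    result = await client.entities_recognition_general(documents=docs)
    for doc in result.documents:
        print(doc.id, [entity.text for entity in doc.entities])

asyncio.run(main())
```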

async def entities_linking(
self,
documents: List["MultiLanguageInput"],
documents: List["models.MultiLanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
@@ -186,13 +116,14 @@ async def entities_linking(
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.EntityLinkingResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))

_input = models.MultiLanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.entities_linking.metadata['url']
url = self.entities_linking.metadata['url'] # type: ignore
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
@@ -230,11 +161,11 @@ async def entities_linking(
return cls(pipeline_response, deserialized, {})

return deserialized
entities_linking.metadata = {'url': '/entities/linking'}
entities_linking.metadata = {'url': '/entities/linking'} # type: ignore
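
The `error_map` edit repeated in every operation is also a behavior fix: a caller-supplied map is now merged over the generated 404/409 defaults via `update()` instead of replacing them wholesale. Continuing the sketch above, an illustrative override:

```python
from azure.core.exceptions import ClientAuthenticationError

# Hypothetical override: surface 401 as a specific exception while the
# generated 404/409 mappings are preserved by the new update() call.
result = await client.entities_linking(
    documents=docs,
    error_map={401: ClientAuthenticationError},
)
```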

async def key_phrases(
self,
documents: List["MultiLanguageInput"],
documents: List["models.MultiLanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
@@ -257,13 +188,14 @@ async def key_phrases(
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.KeyPhraseResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))

_input = models.MultiLanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.key_phrases.metadata['url']
url = self.key_phrases.metadata['url'] # type: ignore
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
@@ -301,11 +233,11 @@ async def key_phrases(
return cls(pipeline_response, deserialized, {})

return deserialized
key_phrases.metadata = {'url': '/keyPhrases'}
key_phrases.metadata = {'url': '/keyPhrases'} # type: ignore

async def languages(
self,
documents: List["LanguageInput"],
documents: List["models.LanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
@@ -328,13 +260,14 @@ async def languages(
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.LanguageResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))

_input = models.LanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.languages.metadata['url']
url = self.languages.metadata['url'] # type: ignore
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
@@ -372,11 +305,11 @@ async def languages(
return cls(pipeline_response, deserialized, {})

return deserialized
languages.metadata = {'url': '/languages'}
languages.metadata = {'url': '/languages'} # type: ignore

async def sentiment(
self,
documents: List["MultiLanguageInput"],
documents: List["models.MultiLanguageInput"],
model_version: Optional[str] = None,
show_stats: Optional[bool] = None,
**kwargs
@@ -399,13 +332,14 @@ async def sentiment(
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SentimentResponse"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))

_input = models.MultiLanguageBatchInput(documents=documents)
content_type = kwargs.pop("content_type", "application/json")

# Construct URL
url = self.sentiment.metadata['url']
url = self.sentiment.metadata['url'] # type: ignore
path_format_arguments = {
'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True),
}
@@ -443,4 +377,4 @@ async def sentiment(
return cls(pipeline_response, deserialized, {})

return deserialized
sentiment.metadata = {'url': '/sentiment'}
sentiment.metadata = {'url': '/sentiment'} # type: ignore
----
@@ -32,6 +32,7 @@
from ._models_py3 import SentimentConfidenceScorePerLabel
from ._models_py3 import SentimentResponse
from ._models_py3 import TextAnalyticsError
from ._models_py3 import TextAnalyticsWarning
except (SyntaxError, ImportError):
from ._models import DetectedLanguage # type: ignore
from ._models import DocumentEntities # type: ignore
@@ -58,12 +59,14 @@
from ._models import SentimentConfidenceScorePerLabel # type: ignore
from ._models import SentimentResponse # type: ignore
from ._models import TextAnalyticsError # type: ignore
from ._models import TextAnalyticsWarning # type: ignore

from ._text_analytics_client_enums import (
DocumentSentimentValue,
ErrorCodeValue,
InnerErrorCodeValue,
SentenceSentimentValue,
WarningCodeValue,
)

__all__ = [
@@ -92,8 +95,10 @@
'SentimentConfidenceScorePerLabel',
'SentimentResponse',
'TextAnalyticsError',
'TextAnalyticsWarning',
'DocumentSentimentValue',
'ErrorCodeValue',
'InnerErrorCodeValue',
'SentenceSentimentValue',
'WarningCodeValue',
]
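
The v3.0 surface adds a warnings model and its enum to the package exports. A hedged sketch of consuming them — the per-document `warnings` attribute, its `code`/`message` fields, and the enum member name are assumptions about the v3.0 shape, not shown in this diff:

```python
from azure.ai.textanalytics.models import WarningCodeValue

def log_warnings(doc):
    # Assumed shape: each TextAnalyticsWarning carries `code` and `message`.
    for warning in getattr(doc, "warnings", []) or []:
        if warning.code == WarningCodeValue.long_words_in_document:
            print("document {}: {}".format(doc.id, warning.message))
```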