[AutoRelease] t2-loganalytics-2021-07-12-55352 (#19751)
* CodeGen from PR 14953 in Azure/azure-rest-api-specs
Vipinhas/Remove Cluster max capacity and Add new Api Version (#14953)

* removing read-only property, no longer in use - NOT a breaking change

* Remove max capacity reservation level from cluster

* Add new api version to support new capacity reservation level tiers

* remove new api version

* add copy of last dir. for new api-version

* add new api version changes

* add units

* version, CHANGELOG

* test

Co-authored-by: SDKAuto <sdkautomation@microsoft.com>
Co-authored-by: PythonSdkPipelines <PythonSdkPipelines>
Azure CLI Bot and SDKAuto authored Jul 12, 2021
1 parent 863f687 commit c8d75cf
Showing 44 changed files with 329 additions and 333 deletions.
21 changes: 21 additions & 0 deletions sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md
@@ -1,5 +1,26 @@
# Release History

## 11.0.0 (2021-07-12)

**Features**

- Model ClusterPatch has a new parameter billing_type
- Model Workspace has a new parameter features
- Model WorkspacePatch has a new parameter features
- Model WorkspaceFeatures has a new parameter disable_local_auth

**Breaking changes**

- Model Workspace no longer has parameter immediate_purge_data_on30_days
- Model Workspace no longer has parameter enable_log_access_using_only_resource_permissions
- Model Workspace no longer has parameter cluster_resource_id
- Model Workspace no longer has parameter enable_data_export
- Model WorkspacePatch no longer has parameter immediate_purge_data_on30_days
- Model WorkspacePatch no longer has parameter enable_log_access_using_only_resource_permissions
- Model WorkspacePatch no longer has parameter cluster_resource_id
- Model WorkspacePatch no longer has parameter enable_data_export
- Model CapacityReservationProperties no longer has parameter max_capacity

## 10.0.0 (2021-05-13)

**Features**
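The removals above pair with the new `features` parameter: a plausible reading is that the flat workspace flags now live on `WorkspaceFeatures`. Below is a minimal sketch under that assumption — the resource group, workspace name, region, and subscription ID are placeholders, and the model shapes are inferred from the changelog entries rather than verified against the full 11.0.0 surface.

```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Workspace, WorkspaceFeatures

# Placeholder subscription; DefaultAzureCredential picks up ambient credentials.
client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

workspace = Workspace(
    location="eastus",
    # disable_local_auth is the parameter added to WorkspaceFeatures in 11.0.0.
    features=WorkspaceFeatures(disable_local_auth=True),
)

# Long-running create/update: .result() blocks until provisioning completes.
poller = client.workspaces.begin_create_or_update("my-rg", "my-workspace", workspace)
print(poller.result().provisioning_state)
```

Code written against 10.0.0 that set flags such as `enable_data_export` directly on `Workspace` or `WorkspacePatch` would need to move them onto `features`, assuming those flags survive under the same names on `WorkspaceFeatures`.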
11 changes: 7 additions & 4 deletions sdk/loganalytics/azure-mgmt-loganalytics/_meta.json
@@ -1,8 +1,11 @@
{
"autorest": "3.3.0",
"use": "@autorest/python@5.6.6",
"commit": "96e8b312e8e732cd6ce52dae178ff0ae3768f8da",
"autorest": "3.4.2",
"use": [
"@autorest/python@5.8.1",
"@autorest/modelerfour@4.19.2"
],
"commit": "313e78fa9784a6c17e8dcbb65fdf5f4b7077a184",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.6.6 --version=3.3.0",
"autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.1 --use=@autorest/modelerfour@4.19.2 --version=3.4.2",
"readme": "specification/operationalinsights/resource-manager/readme.md"
}
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

VERSION = "10.0.0"
VERSION = "11.0.0"
@@ -44,7 +44,7 @@ async def list_by_workspace(
self,
resource_group_name: str,
workspace_name: str,
**kwargs
**kwargs: Any
) -> List["_models.AvailableServiceTier"]:
"""Gets the available service tiers for the workspace.
@@ -69,7 +69,7 @@ async def list_by_workspace(
url = self.list_by_workspace.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
}
url = self._client.format_url(url, **path_format_arguments)
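The only signature change in this hunk is the `**kwargs: Any` annotation; behavior is unchanged. For orientation, here is a hedged async usage sketch of the operation shown, assuming the operation group is exposed as `available_service_tiers` on the `aio` client and using placeholder resource names; the printed attributes follow the `AvailableServiceTier` model and are illustrative.

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Unlike the paged operations further down, this coroutine returns a plain list.
            tiers = await client.available_service_tiers.list_by_workspace("my-rg", "my-workspace")
            for tier in tiers:
                print(tier.service_tier, tier.capacity_reservation_level)


asyncio.run(main())
```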
@@ -46,7 +46,7 @@ def __init__(self, client, config, serializer, deserializer) -> None:
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs
**kwargs: Any
) -> AsyncIterable["_models.ClusterListResult"]:
"""Gets Log Analytics clusters in a resource group.
@@ -74,7 +74,7 @@ def prepare_request(next_link=None):
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -116,7 +116,7 @@ async def get_next(next_link=None):

def list(
self,
**kwargs
**kwargs: Any
) -> AsyncIterable["_models.ClusterListResult"]:
"""Gets the Log Analytics clusters in a subscription.
@@ -186,7 +186,7 @@ async def _create_or_update_initial(
resource_group_name: str,
cluster_name: str,
parameters: "_models.Cluster",
**kwargs
**kwargs: Any
) -> Optional["_models.Cluster"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Cluster"]]
error_map = {
@@ -200,7 +200,7 @@ async def _create_or_update_initial(
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
}
@@ -245,7 +245,7 @@ async def begin_create_or_update(
resource_group_name: str,
cluster_name: str,
parameters: "_models.Cluster",
**kwargs
**kwargs: Any
) -> AsyncLROPoller["_models.Cluster"]:
"""Create or update a Log Analytics cluster.
@@ -257,8 +257,8 @@ async def begin_create_or_update(
:type parameters: ~azure.mgmt.loganalytics.models.Cluster
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response)
@@ -292,7 +292,7 @@ def get_long_running_output(pipeline_response):
return deserialized

path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
}
@@ -315,7 +315,7 @@ async def _delete_initial(
self,
resource_group_name: str,
cluster_name: str,
**kwargs
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
@@ -328,7 +328,7 @@ async def _delete_initial(
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
@@ -346,7 +346,7 @@ async def _delete_initial(
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response

if response.status_code not in [200, 204]:
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
@@ -360,7 +360,7 @@ async def begin_delete(
self,
resource_group_name: str,
cluster_name: str,
**kwargs
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a cluster instance.
@@ -370,8 +370,8 @@ async def begin_delete(
:type cluster_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
@@ -401,7 +401,7 @@ def get_long_running_output(pipeline_response):
return cls(pipeline_response, None, {})

path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
@@ -424,7 +424,7 @@ async def get(
self,
resource_group_name: str,
cluster_name: str,
**kwargs
**kwargs: Any
) -> "_models.Cluster":
"""Gets a Log Analytics cluster instance.
@@ -449,7 +449,7 @@ async def get(
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -484,7 +484,7 @@ async def update(
resource_group_name: str,
cluster_name: str,
parameters: "_models.ClusterPatch",
**kwargs
**kwargs: Any
) -> "_models.Cluster":
"""Updates a Log Analytics cluster.
@@ -511,7 +511,7 @@ async def update(
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
}
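The reworded `polling` docstrings above describe the same three modes as before: the default `AsyncARMPolling`, `polling=False` for no polling, or a caller-supplied polling object. Below is a hedged sketch of the first two against `begin_delete`; cluster and resource names are placeholders, and note that the delete path now also treats 202 as an expected in-progress status.

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Default AsyncARMPolling: result() waits until the service reports completion.
            poller = await client.clusters.begin_delete("my-rg", "my-cluster")
            await poller.result()

            # polling=False attaches no polling method, so result() returns as soon as
            # the initial (200/202/204) response has been processed.
            fire_and_forget = await client.clusters.begin_delete(
                "my-rg", "my-other-cluster", polling=False
            )
            await fire_and_forget.result()


asyncio.run(main())
```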
@@ -45,7 +45,7 @@ def list_by_workspace(
self,
resource_group_name: str,
workspace_name: str,
**kwargs
**kwargs: Any
) -> AsyncIterable["_models.DataExportListResult"]:
"""Lists the data export instances within a workspace.
@@ -76,7 +76,7 @@ def prepare_request(next_link=None):
url = self.list_by_workspace.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
}
url = self._client.format_url(url, **path_format_arguments)
@@ -122,7 +122,7 @@ async def create_or_update(
workspace_name: str,
data_export_name: str,
parameters: "_models.DataExport",
**kwargs
**kwargs: Any
) -> "_models.DataExport":
"""Create or update a data export.
@@ -152,7 +152,7 @@ async def create_or_update(
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z][A-Za-z0-9-]+[A-Za-z0-9]$'),
}
@@ -196,7 +196,7 @@ async def get(
resource_group_name: str,
workspace_name: str,
data_export_name: str,
**kwargs
**kwargs: Any
) -> "_models.DataExport":
"""Gets a data export instance.
@@ -223,7 +223,7 @@ async def get(
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'),
}
@@ -259,7 +259,7 @@ async def delete(
resource_group_name: str,
workspace_name: str,
data_export_name: str,
**kwargs
**kwargs: Any
) -> None:
"""Deletes the specified data export in a given workspace..
@@ -286,7 +286,7 @@ async def delete(
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'),
}
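For completeness, a hedged sketch of the data-export operations touched in this file, assuming they are exposed as `data_exports` on the `aio` client; the resource group, workspace, and export names are placeholders. `list_by_workspace` returns an async pager rather than a coroutine, so it is iterated with `async for` and not awaited directly.

```python
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Paged listing: the pager issues follow-up requests lazily as you iterate.
            async for export in client.data_exports.list_by_workspace("my-rg", "my-workspace"):
                print(export.name)

            # Point reads and deletes address a single export by name.
            export = await client.data_exports.get("my-rg", "my-workspace", "my-export")
            print(export.id)
            await client.data_exports.delete("my-rg", "my-workspace", "my-export")


asyncio.run(main())
```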