[AutoRelease] t2-loganalytics-2022-02-14-99663(Do not merge) #23047

Merged · 1 commit · Feb 18, 2022
6 changes: 6 additions & 0 deletions sdk/loganalytics/azure-mgmt-loganalytics/CHANGELOG.md
@@ -1,5 +1,11 @@
# Release History

## 13.0.0b2 (2022-02-14)

**Features**

- Added operation TablesOperations.migrate

## 13.0.0b1 (2022-01-18)

**Features**
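A minimal usage sketch for the new `TablesOperations.migrate` operation listed above, assuming the synchronous signature mirrors the async one shown later in this diff; the credential setup, subscription ID, and table name are placeholders, not part of this change.

```python
# Sketch only: invoke the new migrate operation from the synchronous client.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Switch a classic custom-log table to Data Collection Rule-based custom logs.
client.tables.migrate(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    table_name="MyCustomTable_CL",
)
```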
4 changes: 2 additions & 2 deletions sdk/loganalytics/azure-mgmt-loganalytics/_meta.json
@@ -4,8 +4,8 @@
"@autorest/python@5.12.0",
"@autorest/modelerfour@4.19.3"
],
"commit": "23ca45b7dfce24112bc686bae70c4424f33ae69e",
"commit": "24b1f9ed9a0768a55b14cced1761a40f7c12b6d2",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --python3-only --track2 --use=@autorest/python@5.12.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2",
"autorest_command": "autorest specification/operationalinsights/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/azure-sdk-for-python/sdk --python3-only --track2 --use=@autorest/python@5.12.0 --use=@autorest/modelerfour@4.19.3 --version=3.7.2",
"readme": "specification/operationalinsights/resource-manager/readme.md"
}
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

VERSION = "13.0.0b1"
VERSION = "13.0.0b2"
@@ -22,7 +22,7 @@

from ... import models as _models
from ..._vendor import _convert_request
from ...operations._tables_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_workspace_request, build_update_request_initial
from ...operations._tables_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_workspace_request, build_migrate_request, build_update_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

@@ -526,3 +526,56 @@ def get_long_running_output(pipeline_response):
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}'} # type: ignore

@distributed_trace_async
async def migrate(
self,
resource_group_name: str,
workspace_name: str,
table_name: str,
**kwargs: Any
) -> None:
"""Migrate a Log Analytics table from support of the Data Collector API and Custom Fields features
to support of Data Collection Rule-based Custom Logs.

:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param table_name: The name of the table.
:type table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))


request = build_migrate_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
workspace_name=workspace_name,
table_name=table_name,
template_url=self.migrate.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)

pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response

if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

if cls:
return cls(pipeline_response, None, {})

migrate.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}/migrate'} # type: ignore

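A hedged sketch of driving the async `migrate` shown above; the aio client import, the context-manager usage, and the placeholder names are assumptions based on the usual track 2 layout, not part of this diff.

```python
# Sketch only: same call through the async operations class added in this file.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.loganalytics.aio import LogAnalyticsManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with LogAnalyticsManagementClient(credential, "<subscription-id>") as client:
            # Returns None on HTTP 200; raises HttpResponseError otherwise.
            await client.tables.migrate(
                resource_group_name="<resource-group>",
                workspace_name="<workspace>",
                table_name="MyCustomTable_CL",
            )

asyncio.run(main())
```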
@@ -245,12 +245,12 @@ class StorageInsightState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
ERROR = "ERROR"

class TablePlanEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
"""The table plan.
"""Instruct the system how to handle and charge the logs ingested to this table.
"""

#: Basic - logs that are adjusted to support high volume / low value verbose logs.
#: Logs that are adjusted to support high volume low value verbose logs.
BASIC = "Basic"
#: Analytics - logs that allow monitoring and analytics.
#: Logs that allow monitoring and analytics.
ANALYTICS = "Analytics"

class TableSubTypeEnum(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)):
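The reworded plan docstrings describe a per-table handling and billing choice. Below is a sketch of applying the enum when creating or updating a table; the `begin_create_or_update` call, the `parameters` keyword, and the placeholder names are assumptions based on the existing operations in this SDK rather than something this diff adds.

```python
# Sketch only: create (or update) a table on the Basic plan.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Table, TablePlanEnum

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.tables.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    table_name="MyCustomTable_CL",
    parameters=Table(
        plan=TablePlanEnum.BASIC,   # high-volume / low-value verbose logs
    ),
)
table = poller.result()
print(table.plan, table.last_plan_modified_date)
```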
@@ -1715,8 +1715,6 @@ def __init__(
class RestoredLogs(msrest.serialization.Model):
"""Restore parameters.

Variables are only populated by the server, and will be ignored when sending a request.

:ivar start_restore_time: The timestamp to start the restore from (UTC).
:vartype start_restore_time: ~datetime.datetime
:ivar end_restore_time: The timestamp to end the restore by (UTC).
@@ -1725,10 +1723,6 @@ class RestoredLogs(msrest.serialization.Model):
:vartype source_table: str
"""

_validation = {
'source_table': {'readonly': True},
}

_attribute_map = {
'start_restore_time': {'key': 'startRestoreTime', 'type': 'iso-8601'},
'end_restore_time': {'key': 'endRestoreTime', 'type': 'iso-8601'},
@@ -1740,18 +1734,21 @@ def __init__(
*,
start_restore_time: Optional[datetime.datetime] = None,
end_restore_time: Optional[datetime.datetime] = None,
source_table: Optional[str] = None,
**kwargs
):
"""
:keyword start_restore_time: The timestamp to start the restore from (UTC).
:paramtype start_restore_time: ~datetime.datetime
:keyword end_restore_time: The timestamp to end the restore by (UTC).
:paramtype end_restore_time: ~datetime.datetime
:keyword source_table: The table to restore data from.
:paramtype source_table: str
"""
super(RestoredLogs, self).__init__(**kwargs)
self.start_restore_time = start_restore_time
self.end_restore_time = end_restore_time
self.source_table = None
self.source_table = source_table


class ResultStatistics(msrest.serialization.Model):
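With `source_table` no longer read-only, restore parameters can be assembled entirely on the client. A sketch follows; submitting the restore by creating a `_RST` table through `begin_create_or_update` reflects the documented restore workflow and is an assumption, not something shown in this diff.

```python
# Sketch only: source_table is now writable on RestoredLogs.
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import RestoredLogs, Table

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

restore = RestoredLogs(
    start_restore_time=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
    end_restore_time=datetime.datetime(2022, 1, 15, tzinfo=datetime.timezone.utc),
    source_table="Heartbeat",   # previously server-populated, now settable
)

# Assumed workflow: the restore runs when a *_RST table is created with these parameters.
poller = client.tables.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    table_name="Heartbeat_RST",
    parameters=Table(restored_logs=restore),
)
restored_table = poller.result()
```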
@@ -2228,15 +2225,15 @@ class SearchResults(msrest.serialization.Model):

:ivar query: Search job query.
:vartype query: str
:ivar description: Search results table's Description.
:ivar description: Search job Description.
:vartype description: str
:ivar limit: Limit the search job to return up to specified number of rows.
:vartype limit: int
:ivar start_search_time: The timestamp to start the search from (UTC).
:vartype start_search_time: ~datetime.datetime
:ivar end_search_time: The timestamp to end the search by (UTC).
:vartype end_search_time: ~datetime.datetime
:ivar source_table: The table to search data from.
:ivar source_table: The table used in the search job.
:vartype source_table: str
"""

@@ -2266,7 +2263,7 @@ def __init__(
"""
:keyword query: Search job query.
:paramtype query: str
:keyword description: Search results table's Description.
:keyword description: Search job Description.
:paramtype description: str
:keyword limit: Limit the search job to return up to specified number of rows.
:paramtype limit: int
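The reworded `SearchResults` docstrings frame these fields as search-job parameters. A sketch of populating them when creating a search-job table follows; the `_SRCH` table-name suffix and the create call are assumptions drawn from the search-job workflow, not from this diff.

```python
# Sketch only: search-job parameters on a *_SRCH table.
import datetime

from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import SearchResults, Table

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

search = SearchResults(
    query="Heartbeat | where Computer == 'web-01'",
    description="Heartbeats from web-01 in January",   # search job description
    limit=1000,
    start_search_time=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
    end_search_time=datetime.datetime(2022, 1, 31, tzinfo=datetime.timezone.utc),
)
# source_table stays read-only; the service fills in the table the job ran against.

poller = client.tables.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    table_name="Heartbeat_SRCH",
    parameters=Table(search_results=search),
)
```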
@@ -2690,11 +2687,11 @@ class Table(ProxyResource):
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.loganalytics.models.SystemData
:ivar retention_in_days: The data table data retention in days, between 4 and 730. Setting this
property to null will default to the workspace retention.
:ivar retention_in_days: The table retention in days, between 4 and 730. Setting this property
to -1 will default to the workspace retention.
:vartype retention_in_days: int
:ivar total_retention_in_days: The table data total retention in days, between 4 and 2555.
Setting this property to null will default to table retention.
:ivar total_retention_in_days: The table total retention in days, between 4 and 2555. Setting
this property to -1 will default to table retention.
:vartype total_retention_in_days: int
:ivar archive_retention_in_days: The table data archive retention in days. Calculated as
(totalRetentionInDays-retentionInDays).
@@ -2705,7 +2702,8 @@
:vartype restored_logs: ~azure.mgmt.loganalytics.models.RestoredLogs
:ivar result_statistics: Search job execution statistics.
:vartype result_statistics: ~azure.mgmt.loganalytics.models.ResultStatistics
:ivar plan: The table plan. Possible values include: "Basic", "Analytics".
:ivar plan: Instruct the system how to handle and charge the logs ingested to this table.
Possible values include: "Basic", "Analytics".
:vartype plan: str or ~azure.mgmt.loganalytics.models.TablePlanEnum
:ivar last_plan_modified_date: The timestamp that table plan was last modified (UTC).
:vartype last_plan_modified_date: str
@@ -2759,19 +2757,20 @@ def __init__(
**kwargs
):
"""
:keyword retention_in_days: The data table data retention in days, between 4 and 730. Setting
this property to null will default to the workspace retention.
:keyword retention_in_days: The table retention in days, between 4 and 730. Setting this
property to -1 will default to the workspace retention.
:paramtype retention_in_days: int
:keyword total_retention_in_days: The table data total retention in days, between 4 and 2555.
Setting this property to null will default to table retention.
:keyword total_retention_in_days: The table total retention in days, between 4 and 2555.
Setting this property to -1 will default to table retention.
:paramtype total_retention_in_days: int
:keyword search_results: Parameters of the search job that initiated this table.
:paramtype search_results: ~azure.mgmt.loganalytics.models.SearchResults
:keyword restored_logs: Parameters of the restore operation that initiated this table.
:paramtype restored_logs: ~azure.mgmt.loganalytics.models.RestoredLogs
:keyword result_statistics: Search job execution statistics.
:paramtype result_statistics: ~azure.mgmt.loganalytics.models.ResultStatistics
:keyword plan: The table plan. Possible values include: "Basic", "Analytics".
:keyword plan: Instruct the system how to handle and charge the logs ingested to this table.
Possible values include: "Basic", "Analytics".
:paramtype plan: str or ~azure.mgmt.loganalytics.models.TablePlanEnum
:keyword schema: Table schema.
:paramtype schema: ~azure.mgmt.loganalytics.models.Schema
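The retention docstrings now state that -1, rather than null, falls back to the parent retention. A sketch of setting both fields through the table update LRO follows; `begin_update` and the `parameters` keyword are assumptions consistent with the update request builder imported earlier in this diff.

```python
# Sketch only: 30 days of interactive retention, 365 days of total retention.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Table

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.tables.begin_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    table_name="MyCustomTable_CL",
    parameters=Table(
        retention_in_days=30,          # 4-730; -1 falls back to the workspace retention
        total_retention_in_days=365,   # 4-2555; -1 falls back to the table retention
    ),
)
updated = poller.result()
# archive_retention_in_days is computed server-side as totalRetentionInDays - retentionInDays.
print(updated.archive_retention_in_days)
```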
@@ -3015,6 +3014,7 @@ class Workspace(TrackedResource):
'type': {'readonly': True},
'location': {'required': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'customer_id': {'readonly': True},
'created_date': {'readonly': True},
'modified_date': {'readonly': True},
@@ -3050,7 +3050,6 @@ def __init__(
location: str,
tags: Optional[Dict[str, str]] = None,
e_tag: Optional[str] = None,
provisioning_state: Optional[Union[str, "WorkspaceEntityStatus"]] = None,
sku: Optional["WorkspaceSku"] = None,
retention_in_days: Optional[int] = None,
workspace_capping: Optional["WorkspaceCapping"] = None,
@@ -3068,9 +3067,6 @@
:paramtype location: str
:keyword e_tag: The ETag of the workspace.
:paramtype e_tag: str
:keyword provisioning_state: The provisioning state of the workspace. Possible values include:
"Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating".
:paramtype provisioning_state: str or ~azure.mgmt.loganalytics.models.WorkspaceEntityStatus
:keyword sku: The SKU of the workspace.
:paramtype sku: ~azure.mgmt.loganalytics.models.WorkspaceSku
:keyword retention_in_days: The workspace data retention in days. Allowed values are per
@@ -3099,7 +3095,7 @@ def __init__(
super(Workspace, self).__init__(tags=tags, location=location, **kwargs)
self.system_data = None
self.e_tag = e_tag
self.provisioning_state = provisioning_state
self.provisioning_state = None
self.customer_id = None
self.sku = sku
self.retention_in_days = retention_in_days
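`provisioning_state` is now read-only on `Workspace`, so it is no longer accepted by the constructor and is populated from the service response instead. A sketch follows; `begin_create_or_update` on the workspaces operation group and the `PerGB2018` SKU name are assumptions, not part of this diff.

```python
# Sketch only: provisioning_state is no longer a constructor argument.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import Workspace, WorkspaceSku

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

poller = client.workspaces.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    parameters=Workspace(
        location="eastus",
        sku=WorkspaceSku(name="PerGB2018"),
        retention_in_days=30,
    ),
)
created = poller.result()
print(created.provisioning_state)   # populated by the service, e.g. "Succeeded"
```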
@@ -3358,6 +3354,7 @@ class WorkspacePatch(AzureEntityResource):
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'customer_id': {'readonly': True},
'created_date': {'readonly': True},
'modified_date': {'readonly': True},
@@ -3389,7 +3386,6 @@ def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
provisioning_state: Optional[Union[str, "WorkspaceEntityStatus"]] = None,
sku: Optional["WorkspaceSku"] = None,
retention_in_days: Optional[int] = None,
workspace_capping: Optional["WorkspaceCapping"] = None,
@@ -3403,9 +3399,6 @@
"""
:keyword tags: A set of tags. Resource tags. Optional.
:paramtype tags: dict[str, str]
:keyword provisioning_state: The provisioning state of the workspace. Possible values include:
"Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating".
:paramtype provisioning_state: str or ~azure.mgmt.loganalytics.models.WorkspaceEntityStatus
:keyword sku: The SKU of the workspace.
:paramtype sku: ~azure.mgmt.loganalytics.models.WorkspaceSku
:keyword retention_in_days: The workspace data retention in days. Allowed values are per
@@ -3433,7 +3426,7 @@
"""
super(WorkspacePatch, self).__init__(**kwargs)
self.tags = tags
self.provisioning_state = provisioning_state
self.provisioning_state = None
self.customer_id = None
self.sku = sku
self.retention_in_days = retention_in_days
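The same read-only change applies to `WorkspacePatch`. A final sketch of a tags-and-retention patch; the non-LRO `workspaces.update` method is an assumption based on how track 2 versions of this SDK expose the PATCH operation.

```python
# Sketch only: patch tags and retention; provisioning_state can no longer be supplied.
from azure.identity import DefaultAzureCredential
from azure.mgmt.loganalytics import LogAnalyticsManagementClient
from azure.mgmt.loganalytics.models import WorkspacePatch

client = LogAnalyticsManagementClient(DefaultAzureCredential(), "<subscription-id>")

patched = client.workspaces.update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    parameters=WorkspacePatch(
        tags={"env": "dev"},
        retention_in_days=60,
    ),
)
print(patched.provisioning_state)   # still readable on the response
```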