From fff323fe93cd386d38f4f3db42bd9b61f3f22eec Mon Sep 17 00:00:00 2001 From: Hector Castejon Diaz Date: Mon, 22 Jul 2024 14:08:23 +0200 Subject: [PATCH 1/2] [Internal] Update OpenAPI spec --- .codegen/_openapi_sha | 2 +- .codegen/service.py.tmpl | 4 +- databricks/sdk/__init__.py | 65 +- databricks/sdk/service/billing.py | 689 ++++-- databricks/sdk/service/catalog.py | 51 +- databricks/sdk/service/compute.py | 182 +- databricks/sdk/service/dashboards.py | 598 ++++- databricks/sdk/service/iam.py | 144 +- databricks/sdk/service/jobs.py | 7 +- databricks/sdk/service/marketplace.py | 124 +- databricks/sdk/service/oauth2.py | 197 +- databricks/sdk/service/serving.py | 240 +- databricks/sdk/service/settings.py | 427 +++- databricks/sdk/service/sharing.py | 1 + databricks/sdk/service/sql.py | 2931 ++++++++++++++++++++----- tests/test_core.py | 4 +- 16 files changed, 4519 insertions(+), 1147 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index c4b47ca14..ed18d818d 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -7437dabb9dadee402c1fc060df4c1ce8cc5369f0 \ No newline at end of file +37e2bbe0cbcbbbe78a06a018d4fab06314a26a40 \ No newline at end of file diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl index 643b1f33a..b4e6b1dc9 100644 --- a/.codegen/service.py.tmpl +++ b/.codegen/service.py.tmpl @@ -295,7 +295,7 @@ class {{.PascalName}}API:{{if .Description}} {{if .NeedsOffsetDedupe -}} # deduplicate items that may have been added during iteration seen = set() - {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.0/clusters/events")) }} + {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) }} query['{{.Pagination.Offset.Name}}'] = {{- if eq .Pagination.Increment 1 -}} 1 @@ -321,7 +321,7 @@ class {{.PascalName}}API:{{if .Description}} if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']: return {{if or (eq "GET" .Verb) (eq "HEAD" 
.Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}'] - {{- else if eq .Path "/api/2.0/clusters/events" -}} + {{- else if eq .Path "/api/2.1/clusters/events" -}} if 'next_page' not in json or not json['next_page']: return body = json['next_page'] diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 8485efbac..7603678e3 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -27,7 +27,7 @@ InstancePoolsAPI, InstanceProfilesAPI, LibrariesAPI, PolicyFamiliesAPI) -from databricks.sdk.service.dashboards import LakeviewAPI +from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccountAccessControlAPI, AccountAccessControlProxyAPI, @@ -68,6 +68,7 @@ EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI, + NotificationDestinationsAPI, PersonalComputeAPI, RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, @@ -75,11 +76,13 @@ from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI, RecipientActivationAPI, RecipientsAPI, SharesAPI) -from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI, - DashboardWidgetsAPI, DataSourcesAPI, - DbsqlPermissionsAPI, QueriesAPI, +from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI, + DashboardsAPI, DashboardWidgetsAPI, + DataSourcesAPI, DbsqlPermissionsAPI, + QueriesAPI, QueriesLegacyAPI, QueryHistoryAPI, QueryVisualizationsAPI, + QueryVisualizationsLegacyAPI, StatementExecutionAPI, WarehousesAPI) from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) @@ -166,6 +169,7 @@ def __init__(self, serving_endpoints = ServingEndpointsAPI(self._api_client) self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) self._alerts = AlertsAPI(self._api_client) + self._alerts_legacy = AlertsLegacyAPI(self._api_client) 
self._apps = AppsAPI(self._api_client) self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client) self._catalogs = CatalogsAPI(self._api_client) @@ -190,6 +194,7 @@ def __init__(self, self._external_locations = ExternalLocationsAPI(self._api_client) self._files = FilesAPI(self._api_client) self._functions = FunctionsAPI(self._api_client) + self._genie = GenieAPI(self._api_client) self._git_credentials = GitCredentialsAPI(self._api_client) self._global_init_scripts = GlobalInitScriptsAPI(self._api_client) self._grants = GrantsAPI(self._api_client) @@ -203,6 +208,7 @@ def __init__(self, self._metastores = MetastoresAPI(self._api_client) self._model_registry = ModelRegistryAPI(self._api_client) self._model_versions = ModelVersionsAPI(self._api_client) + self._notification_destinations = NotificationDestinationsAPI(self._api_client) self._online_tables = OnlineTablesAPI(self._api_client) self._permission_migration = PermissionMigrationAPI(self._api_client) self._permissions = PermissionsAPI(self._api_client) @@ -219,8 +225,10 @@ def __init__(self, self._providers = ProvidersAPI(self._api_client) self._quality_monitors = QualityMonitorsAPI(self._api_client) self._queries = QueriesAPI(self._api_client) + self._queries_legacy = QueriesLegacyAPI(self._api_client) self._query_history = QueryHistoryAPI(self._api_client) self._query_visualizations = QueryVisualizationsAPI(self._api_client) + self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client) self._recipient_activation = RecipientActivationAPI(self._api_client) self._recipients = RecipientsAPI(self._api_client) self._registered_models = RegisteredModelsAPI(self._api_client) @@ -270,6 +278,11 @@ def alerts(self) -> AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts + @property + def alerts_legacy(self) -> AlertsLegacyAPI: + """The alerts API can be used to perform CRUD operations on alerts.""" + return self._alerts_legacy + @property def 
apps(self) -> AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -390,6 +403,11 @@ def functions(self) -> FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions + @property + def genie(self) -> GenieAPI: + """Genie provides a no-code experience for business users, powered by AI/BI.""" + return self._genie + @property def git_credentials(self) -> GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" @@ -455,6 +473,11 @@ def model_versions(self) -> ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions + @property + def notification_destinations(self) -> NotificationDestinationsAPI: + """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" + return self._notification_destinations + @property def online_tables(self) -> OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" @@ -462,7 +485,7 @@ def online_tables(self) -> OnlineTablesAPI: @property def permission_migration(self) -> PermissionMigrationAPI: - """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.""" + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration @property @@ -527,19 +550,29 @@ def quality_monitors(self) -> QualityMonitorsAPI: @property def queries(self) -> QueriesAPI: - """These endpoints are used for CRUD operations on query definitions.""" + """The queries API can be used to perform CRUD operations on queries.""" return self._queries + @property + def queries_legacy(self) -> QueriesLegacyAPI: + """These endpoints are used for 
CRUD operations on query definitions.""" + return self._queries_legacy + @property def query_history(self) -> QueryHistoryAPI: - """Access the history of queries through SQL warehouses.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT.""" return self._query_history @property def query_visualizations(self) -> QueryVisualizationsAPI: - """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations + @property + def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI: + """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + return self._query_visualizations_legacy + @property def recipient_activation(self) -> RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" @@ -742,7 +775,6 @@ def __init__(self, self._api_client = client.ApiClient(self._config) self._access_control = AccountAccessControlAPI(self._api_client) self._billable_usage = BillableUsageAPI(self._api_client) - self._budgets = BudgetsAPI(self._api_client) self._credentials = CredentialsAPI(self._api_client) self._custom_app_integration = CustomAppIntegrationAPI(self._api_client) self._encryption_keys = EncryptionKeysAPI(self._api_client) @@ -765,6 +797,7 @@ def __init__(self, self._vpc_endpoints = VpcEndpointsAPI(self._api_client) self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) self._workspaces = WorkspacesAPI(self._api_client) + self._budgets = BudgetsAPI(self._api_client) @property def config(self) -> client.Config: @@ -784,11 
+817,6 @@ def billable_usage(self) -> BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage - @property - def budgets(self) -> BudgetsAPI: - """These APIs manage budget configuration including notifications for exceeding a budget for a period.""" - return self._budgets - @property def credentials(self) -> CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -796,7 +824,7 @@ def credentials(self) -> CredentialsAPI: @property def custom_app_integration(self) -> CustomAppIntegrationAPI: - """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" + """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration @property @@ -851,7 +879,7 @@ def private_access(self) -> PrivateAccessAPI: @property def published_app_integration(self) -> PublishedAppIntegrationAPI: - """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" + """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration @property @@ -899,6 +927,11 @@ def workspaces(self) -> WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces + @property + def budgets(self) -> BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: 
"""Constructs a ``WorkspaceClient`` for the given workspace. diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 1d4a773c6..d2ef50bc3 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -15,204 +15,372 @@ @dataclass -class Budget: - """Budget configuration to be created.""" +class ActionConfiguration: + action_configuration_id: Optional[str] = None + """Databricks action configuration ID.""" - name: str - """Human-readable name of the budget.""" + action_type: Optional[ActionConfigurationType] = None + """The type of the action.""" - period: str - """Period length in years, months, weeks and/or days. Examples: `1 month`, `30 days`, `1 year, 2 - months, 1 week, 2 days`""" + target: Optional[str] = None + """Target for the action. For example, an email address.""" - start_date: str - """Start date of the budget period calculation.""" + def as_dict(self) -> dict: + """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_configuration_id is not None: + body['action_configuration_id'] = self.action_configuration_id + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target + return body - target_amount: str - """Target amount of the budget per period in USD.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration: + """Deserializes the ActionConfiguration from a dictionary.""" + return cls(action_configuration_id=d.get('action_configuration_id', None), + action_type=_enum(d, 'action_type', ActionConfigurationType), + target=d.get('target', None)) - filter: str - """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches - this expression will be counted in this budget. 
- - Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace - * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag - name'` - tag of the cluster - - Supported comparison operators: * `=` - equal * `!=` - not equal - - Supported logical operators: `AND`, `OR`. - - Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my - value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR - sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`""" - alerts: Optional[List[BudgetAlert]] = None +class ActionConfigurationType(Enum): + + EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION' + + +@dataclass +class AlertConfiguration: + action_configurations: Optional[List[ActionConfiguration]] = None + """Configured actions for this alert. These define what happens when an alert enters a triggered + state.""" + + alert_configuration_id: Optional[str] = None + """Databricks alert configuration ID.""" + + quantity_threshold: Optional[str] = None + """The threshold for the budget alert to determine if it is in a triggered state. The number is + evaluated based on `quantity_type`.""" + + quantity_type: Optional[AlertConfigurationQuantityType] = None + """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured + in.""" - end_date: Optional[str] = None - """Optional end date of the budget.""" + time_period: Optional[AlertConfigurationTimePeriod] = None + """The time window of usage data for the budget.""" + + trigger_type: Optional[AlertConfigurationTriggerType] = None + """The evaluation method to determine when this budget alert is in a triggered state.""" def as_dict(self) -> dict: - """Serializes the Budget into a dictionary suitable for use as a JSON request body.""" + """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts] - if self.end_date is not None: body['end_date'] = self.end_date - if self.filter is not None: body['filter'] = self.filter - if self.name is not None: body['name'] = self.name - if self.period is not None: body['period'] = self.period - if self.start_date is not None: body['start_date'] = self.start_date - if self.target_amount is not None: body['target_amount'] = self.target_amount + if self.action_configurations: + body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.alert_configuration_id is not None: + body['alert_configuration_id'] = self.alert_configuration_id + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> Budget: - """Deserializes the Budget from a dictionary.""" - return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert), - end_date=d.get('end_date', None), - filter=d.get('filter', None), - name=d.get('name', None), - period=d.get('period', None), - 
start_date=d.get('start_date', None), - target_amount=d.get('target_amount', None)) + def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration: + """Deserializes the AlertConfiguration from a dictionary.""" + return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration), + alert_configuration_id=d.get('alert_configuration_id', None), + quantity_threshold=d.get('quantity_threshold', None), + quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), + time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), + trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + + +class AlertConfigurationQuantityType(Enum): + + LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD' + + +class AlertConfigurationTimePeriod(Enum): + + MONTH = 'MONTH' + + +class AlertConfigurationTriggerType(Enum): + + CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED' @dataclass -class BudgetAlert: - email_notifications: Optional[List[str]] = None - """List of email addresses to be notified when budget percentage is exceeded in the given period.""" +class BudgetConfiguration: + account_id: Optional[str] = None + """Databricks account ID.""" + + alert_configurations: Optional[List[AlertConfiguration]] = None + """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one + alert configuration.""" + + budget_configuration_id: Optional[str] = None + """Databricks budget configuration ID.""" + + create_time: Optional[int] = None + """Creation time of this budget configuration.""" + + display_name: Optional[str] = None + """Human-readable name of budget configuration. Max Length: 128""" - min_percentage: Optional[int] = None - """Percentage of the target amount used in the currect period that will trigger a notification.""" + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. 
These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. All + provided filters must be matched for usage to be included.""" + + update_time: Optional[int] = None + """Update time of this budget configuration.""" def as_dict(self) -> dict: - """Serializes the BudgetAlert into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email_notifications: body['email_notifications'] = [v for v in self.email_notifications] - if self.min_percentage is not None: body['min_percentage'] = self.min_percentage + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetAlert: - """Deserializes the BudgetAlert from a dictionary.""" - return cls(email_notifications=d.get('email_notifications', None), - min_percentage=d.get('min_percentage', None)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration: + """Deserializes the BudgetConfiguration from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), + budget_configuration_id=d.get('budget_configuration_id', None), + create_time=d.get('create_time', None), + display_name=d.get('display_name', None), + filter=_from_dict(d, 
'filter', BudgetConfigurationFilter), + update_time=d.get('update_time', None)) @dataclass -class BudgetList: - """List of budgets.""" +class BudgetConfigurationFilter: + tags: Optional[List[BudgetConfigurationFilterTagClause]] = None + """A list of tag keys and values that will limit the budget to usage that includes those specific + custom tags. Tags are case-sensitive and should be entered exactly as they appear in your usage + data.""" - budgets: Optional[List[BudgetWithStatus]] = None + workspace_id: Optional[BudgetConfigurationFilterWorkspaceIdClause] = None + """If provided, usage must match with the provided Databricks workspace IDs.""" def as_dict(self) -> dict: - """Serializes the BudgetList into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetList: - """Deserializes the BudgetList from a dictionary.""" - return cls(budgets=_repeated_dict(d, 'budgets', BudgetWithStatus)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter: + """Deserializes the BudgetConfigurationFilter from a dictionary.""" + return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause), + workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause)) @dataclass -class BudgetWithStatus: - """Budget configuration with daily status.""" +class BudgetConfigurationFilterClause: + operator: Optional[BudgetConfigurationFilterOperator] = None - alerts: Optional[List[BudgetAlert]] = None + values: Optional[List[str]] = None - budget_id: Optional[str] = None + def as_dict(self) -> dict: + """Serializes the 
BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] + return body - creation_time: Optional[str] = None + @classmethod + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause: + """Deserializes the BudgetConfigurationFilterClause from a dictionary.""" + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), + values=d.get('values', None)) - end_date: Optional[str] = None - """Optional end date of the budget.""" - filter: Optional[str] = None - """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches - this expression will be counted in this budget. - - Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace - * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag - name'` - tag of the cluster - - Supported comparison operators: * `=` - equal * `!=` - not equal - - Supported logical operators: `AND`, `OR`. - - Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my - value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR - sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`""" +class BudgetConfigurationFilterOperator(Enum): + + IN = 'IN' - name: Optional[str] = None - """Human-readable name of the budget.""" - period: Optional[str] = None - """Period length in years, months, weeks and/or days. 
Examples: `1 month`, `30 days`, `1 year, 2 - months, 1 week, 2 days`""" +@dataclass +class BudgetConfigurationFilterTagClause: + key: Optional[str] = None + + value: Optional[BudgetConfigurationFilterClause] = None + + def as_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value.as_dict() + return body - start_date: Optional[str] = None - """Start date of the budget period calculation.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause: + """Deserializes the BudgetConfigurationFilterTagClause from a dictionary.""" + return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause)) - status_daily: Optional[List[BudgetWithStatusStatusDailyItem]] = None - """Amount used in the budget for each day (noncumulative).""" - target_amount: Optional[str] = None - """Target amount of the budget per period in USD.""" +@dataclass +class BudgetConfigurationFilterWorkspaceIdClause: + operator: Optional[BudgetConfigurationFilterOperator] = None - update_time: Optional[str] = None + values: Optional[List[int]] = None def as_dict(self) -> dict: - """Serializes the BudgetWithStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts] - if self.budget_id is not None: body['budget_id'] = self.budget_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.end_date is not None: body['end_date'] = self.end_date - if self.filter is not None: body['filter'] = self.filter - if self.name is not None: body['name'] = self.name - if self.period is not None: body['period'] = self.period - 
if self.start_date is not None: body['start_date'] = self.start_date - if self.status_daily: body['status_daily'] = [v.as_dict() for v in self.status_daily] - if self.target_amount is not None: body['target_amount'] = self.target_amount - if self.update_time is not None: body['update_time'] = self.update_time + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatus: - """Deserializes the BudgetWithStatus from a dictionary.""" - return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert), - budget_id=d.get('budget_id', None), - creation_time=d.get('creation_time', None), - end_date=d.get('end_date', None), - filter=d.get('filter', None), - name=d.get('name', None), - period=d.get('period', None), - start_date=d.get('start_date', None), - status_daily=_repeated_dict(d, 'status_daily', BudgetWithStatusStatusDailyItem), - target_amount=d.get('target_amount', None), - update_time=d.get('update_time', None)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause: + """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary.""" + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), + values=d.get('values', None)) @dataclass -class BudgetWithStatusStatusDailyItem: - amount: Optional[str] = None - """Amount used in this day in USD.""" +class CreateBudgetConfigurationBudget: + account_id: Optional[str] = None + """Databricks account ID.""" + + alert_configurations: Optional[List[CreateBudgetConfigurationBudgetAlertConfigurations]] = None + """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one + alert configuration.""" - date: Optional[str] = None + display_name: Optional[str] = None + """Human-readable name of budget configuration. 
Max Length: 128""" + + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. All + provided filters must be matched for usage to be included.""" def as_dict(self) -> dict: - """Serializes the BudgetWithStatusStatusDailyItem into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.amount is not None: body['amount'] = self.amount - if self.date is not None: body['date'] = self.date + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatusStatusDailyItem: - """Deserializes the BudgetWithStatusStatusDailyItem from a dictionary.""" - return cls(amount=d.get('amount', None), date=d.get('date', None)) + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget: + """Deserializes the CreateBudgetConfigurationBudget from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', + CreateBudgetConfigurationBudgetAlertConfigurations), + display_name=d.get('display_name', None), + filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) + + +@dataclass +class CreateBudgetConfigurationBudgetActionConfigurations: + action_type: Optional[ActionConfigurationType] = None + """The type of the action.""" + + target: Optional[str] = None + """Target for the action. 
For example, an email address.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations: + """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary.""" + return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) + + +@dataclass +class CreateBudgetConfigurationBudgetAlertConfigurations: + action_configurations: Optional[List[CreateBudgetConfigurationBudgetActionConfigurations]] = None + """Configured actions for this alert. These define what happens when an alert enters a triggered + state.""" + + quantity_threshold: Optional[str] = None + """The threshold for the budget alert to determine if it is in a triggered state. The number is + evaluated based on `quantity_type`.""" + + quantity_type: Optional[AlertConfigurationQuantityType] = None + """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured + in.""" + + time_period: Optional[AlertConfigurationTimePeriod] = None + """The time window of usage data for the budget.""" + + trigger_type: Optional[AlertConfigurationTriggerType] = None + """The evaluation method to determine when this budget alert is in a triggered state.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_configurations: + body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations: + """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary.""" + return cls(action_configurations=_repeated_dict(d, 'action_configurations', + CreateBudgetConfigurationBudgetActionConfigurations), + quantity_threshold=d.get('quantity_threshold', None), + quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), + time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), + trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + + +@dataclass +class CreateBudgetConfigurationRequest: + budget: CreateBudgetConfigurationBudget + """Properties of the new budget configuration.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: body['budget'] = self.budget.as_dict() + return body + + @classmethod + def 
from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest: + """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget)) + + +@dataclass +class CreateBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + """The created budget configuration.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: body['budget'] = self.budget.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse: + """Deserializes the CreateBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) @dataclass @@ -316,16 +484,16 @@ def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams: @dataclass -class DeleteResponse: +class DeleteBudgetConfigurationResponse: def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: + """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" return cls() @@ -361,6 +529,44 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: return cls(contents=d.get('contents', None)) +@dataclass +class GetBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + + def as_dict(self) -> dict: + """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: 
body['budget'] = self.budget.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse: + """Deserializes the GetBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) + + +@dataclass +class ListBudgetConfigurationsResponse: + budgets: Optional[List[BudgetConfiguration]] = None + + next_page_token: Optional[str] = None + """Token which can be sent as `page_token` to retrieve the next page of results. If this field is + omitted, there are no subsequent budgets.""" + + def as_dict(self) -> dict: + """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse: + """Deserializes the ListBudgetConfigurationsResponse from a dictionary.""" + return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration), + next_page_token=d.get('next_page_token', None)) + + class LogDeliveryConfigStatus(Enum): """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the @@ -586,81 +792,110 @@ def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse: @dataclass -class UpdateLogDeliveryConfigurationStatusRequest: - status: LogDeliveryConfigStatus - """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). - Defaults to `ENABLED`. You can [enable or disable the - configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration - is not supported, so disable a log delivery configuration that is no longer needed.""" +class UpdateBudgetConfigurationBudget: + account_id: Optional[str] = None + """Databricks account ID.""" - log_delivery_configuration_id: Optional[str] = None - """Databricks log delivery configuration ID""" + alert_configurations: Optional[List[AlertConfiguration]] = None + """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one + alert configuration.""" + + budget_configuration_id: Optional[str] = None + """Databricks budget configuration ID.""" + + display_name: Optional[str] = None + """Human-readable name of budget configuration. Max Length: 128""" + + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. All + provided filters must be matched for usage to be included.""" def as_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration_id is not None: - body['log_delivery_configuration_id'] = self.log_delivery_configuration_id - if self.status is not None: body['status'] = self.status.value + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, 
any]) -> UpdateLogDeliveryConfigurationStatusRequest: - """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" - return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), - status=_enum(d, 'status', LogDeliveryConfigStatus)) + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget: + """Deserializes the UpdateBudgetConfigurationBudget from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), + budget_configuration_id=d.get('budget_configuration_id', None), + display_name=d.get('display_name', None), + filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) @dataclass -class UpdateResponse: +class UpdateBudgetConfigurationRequest: + budget: UpdateBudgetConfigurationBudget + """The updated budget. This will overwrite the budget specified by the budget ID.""" + + budget_id: Optional[str] = None + """The Databricks budget configuration ID.""" def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget: body['budget'] = self.budget.as_dict() + if self.budget_id is not None: body['budget_id'] = self.budget_id return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest: + """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget), + budget_id=d.get('budget_id', None)) @dataclass -class WrappedBudget: - budget: Budget - """Budget configuration to be created.""" - - budget_id: Optional[str] = None - """Budget ID""" +class 
UpdateBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + """The updated budget.""" def as_dict(self) -> dict: - """Serializes the WrappedBudget into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.budget: body['budget'] = self.budget.as_dict() - if self.budget_id is not None: body['budget_id'] = self.budget_id return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> WrappedBudget: - """Deserializes the WrappedBudget from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', Budget), budget_id=d.get('budget_id', None)) + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse: + """Deserializes the UpdateBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) @dataclass -class WrappedBudgetWithStatus: - budget: BudgetWithStatus - """Budget configuration with daily status.""" +class UpdateLogDeliveryConfigurationStatusRequest: + status: LogDeliveryConfigStatus + """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). + Defaults to `ENABLED`. You can [enable or disable the + configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration + is not supported, so disable a log delivery configuration that is no longer needed.""" + + log_delivery_configuration_id: Optional[str] = None + """Databricks log delivery configuration ID""" def as_dict(self) -> dict: - """Serializes the WrappedBudgetWithStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.log_delivery_configuration_id is not None: + body['log_delivery_configuration_id'] = self.log_delivery_configuration_id + if self.status is not None: body['status'] = self.status.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> WrappedBudgetWithStatus: - """Deserializes the WrappedBudgetWithStatus from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', BudgetWithStatus)) + def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest: + """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" + return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), + status=_enum(d, 'status', LogDeliveryConfigStatus)) @dataclass @@ -767,39 +1002,42 @@ def download(self, class BudgetsAPI: - """These APIs manage budget configuration including notifications for exceeding a budget for a period. They - can also retrieve the status of each budget.""" + """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your + account. You can set up budgets to either track account-wide spending, or apply filters to track the + spending of specific teams, projects, or workspaces.""" def __init__(self, api_client): self._api = api_client - def create(self, budget: Budget) -> WrappedBudgetWithStatus: - """Create a new budget. 
+ def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse: + """Create new budget. - Creates a new budget in the specified account. + Create a new budget configuration for an account. For full details, see + https://docs.databricks.com/en/admin/account-settings/budgets.html. - :param budget: :class:`Budget` - Budget configuration to be created. + :param budget: :class:`CreateBudgetConfigurationBudget` + Properties of the new budget configuration. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`CreateBudgetConfigurationResponse` """ body = {} if budget is not None: body['budget'] = budget.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', - f'/api/2.0/accounts/{self._api.account_id}/budget', + f'/api/2.1/accounts/{self._api.account_id}/budgets', body=body, headers=headers) - return WrappedBudgetWithStatus.from_dict(res) + return CreateBudgetConfigurationResponse.from_dict(res) def delete(self, budget_id: str): """Delete budget. - Deletes the budget specified by its UUID. + Deletes a budget configuration for an account. Both account and budget configuration are specified by + ID. This cannot be undone. :param budget_id: str - Budget ID + The Databricks budget configuration ID. """ @@ -807,63 +1045,78 @@ def delete(self, budget_id: str): headers = {'Accept': 'application/json', } self._api.do('DELETE', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', headers=headers) - def get(self, budget_id: str) -> WrappedBudgetWithStatus: - """Get budget and its status. + def get(self, budget_id: str) -> GetBudgetConfigurationResponse: + """Get budget. - Gets the budget specified by its UUID, including noncumulative status for each day that the budget is - configured to include. + Gets a budget configuration for an account. 
Both account and budget configuration are specified by ID. :param budget_id: str - Budget ID + The Databricks budget configuration ID. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`GetBudgetConfigurationResponse` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', headers=headers) - return WrappedBudgetWithStatus.from_dict(res) + return GetBudgetConfigurationResponse.from_dict(res) - def list(self) -> Iterator[BudgetWithStatus]: + def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: """Get all budgets. - Gets all budgets associated with this account, including noncumulative status for each day that the - budget is configured to include. + Gets all budgets associated with this account. + + :param page_token: str (optional) + A page token received from a previous get all budget configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. 
- :returns: Iterator over :class:`BudgetWithStatus` + :returns: Iterator over :class:`BudgetConfiguration` """ + query = {} + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/budget', headers=headers) - parsed = BudgetList.from_dict(json).budgets - return parsed if parsed is not None else [] - - def update(self, budget_id: str, budget: Budget): + while True: + json = self._api.do('GET', + f'/api/2.1/accounts/{self._api.account_id}/budgets', + query=query, + headers=headers) + if 'budgets' in json: + for v in json['budgets']: + yield BudgetConfiguration.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, budget_id: str, + budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse: """Modify budget. - Modifies a budget in this account. Budget properties are completely overwritten. + Updates a budget configuration for an account. Both account and budget configuration are specified by + ID. :param budget_id: str - Budget ID - :param budget: :class:`Budget` - Budget configuration to be created. - + The Databricks budget configuration ID. + :param budget: :class:`UpdateBudgetConfigurationBudget` + The updated budget. This will overwrite the budget specified by the budget ID. 
+ :returns: :class:`UpdateBudgetConfigurationResponse` """ body = {} if budget is not None: body['budget'] = budget.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('PATCH', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', - body=body, - headers=headers) + res = self._api.do('PUT', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', + body=body, + headers=headers) + return UpdateBudgetConfigurationResponse.from_dict(res) class LogDeliveryAPI: diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index e6456bc01..c6da9b8c5 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -2418,6 +2418,13 @@ class FunctionParameterType(Enum): PARAM = 'PARAM' +class GetBindingsSecurableType(Enum): + + CATALOG = 'catalog' + EXTERNAL_LOCATION = 'external_location' + STORAGE_CREDENTIAL = 'storage_credential' + + @dataclass class GetMetastoreSummaryResponse: cloud: Optional[str] = None @@ -3742,7 +3749,6 @@ class OnlineTableState(Enum): ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE' ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE' ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED' - ONLINE_TABLE_STATE_UNSPECIFIED = 'ONLINE_TABLE_STATE_UNSPECIFIED' ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE' ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES' PROVISIONING = 'PROVISIONING' @@ -3935,6 +3941,7 @@ class Privilege(Enum): CREATE_VIEW = 'CREATE_VIEW' CREATE_VOLUME = 'CREATE_VOLUME' EXECUTE = 'EXECUTE' + MANAGE = 'MANAGE' MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST' MODIFY = 'MODIFY' READ_FILES = 'READ_FILES' @@ -4849,6 +4856,13 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse: return cls() +class UpdateBindingsSecurableType(Enum): + + CATALOG = 'catalog' + EXTERNAL_LOCATION = 'external_location' + STORAGE_CREDENTIAL = 'storage_credential' + + @dataclass class UpdateCatalog: comment: 
Optional[str] = None @@ -5492,8 +5506,8 @@ class UpdateWorkspaceBindingsParameters: securable_name: Optional[str] = None """The name of the securable.""" - securable_type: Optional[str] = None - """The type of the securable.""" + securable_type: Optional[UpdateBindingsSecurableType] = None + """The type of the securable to bind to a workspace.""" def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" @@ -5501,7 +5515,7 @@ def as_dict(self) -> dict: if self.add: body['add'] = [v.as_dict() for v in self.add] if self.remove: body['remove'] = [v.as_dict() for v in self.remove] if self.securable_name is not None: body['securable_name'] = self.securable_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.securable_type is not None: body['securable_type'] = self.securable_type.value return body @classmethod @@ -5510,7 +5524,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters: return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), remove=_repeated_dict(d, 'remove', WorkspaceBinding), securable_name=d.get('securable_name', None), - securable_type=d.get('securable_type', None)) + securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType)) @dataclass @@ -8172,7 +8186,7 @@ def create(self, res = self._api.do('POST', '/api/2.1/unity-catalog/schemas', body=body, headers=headers) return SchemaInfo.from_dict(res) - def delete(self, full_name: str): + def delete(self, full_name: str, *, force: Optional[bool] = None): """Delete a schema. Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an @@ -8180,13 +8194,17 @@ def delete(self, full_name: str): :param full_name: str Full name of the schema. + :param force: bool (optional) + Force deletion even if the schema is not empty. 
""" + query = {} + if force is not None: query['force'] = force headers = {'Accept': 'application/json', } - self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', headers=headers) + self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', query=query, headers=headers) def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo: """Get a schema. @@ -9172,7 +9190,7 @@ class WorkspaceBindingsAPI: the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - Securables that support binding: - catalog""" + Securable types that support binding: - catalog - storage_credential - external_location""" def __init__(self, api_client): self._api = api_client @@ -9196,14 +9214,15 @@ def get(self, name: str) -> CurrentWorkspaceBindings: headers=headers) return CurrentWorkspaceBindings.from_dict(res) - def get_bindings(self, securable_type: str, securable_name: str) -> WorkspaceBindingsResponse: + def get_bindings(self, securable_type: GetBindingsSecurableType, + securable_name: str) -> WorkspaceBindingsResponse: """Get securable workspace bindings. Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`GetBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. 
@@ -9213,7 +9232,7 @@ def get_bindings(self, securable_type: str, securable_name: str) -> WorkspaceBin headers = {'Accept': 'application/json', } res = self._api.do('GET', - f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', headers=headers) return WorkspaceBindingsResponse.from_dict(res) @@ -9248,7 +9267,7 @@ def update(self, return CurrentWorkspaceBindings.from_dict(res) def update_bindings(self, - securable_type: str, + securable_type: UpdateBindingsSecurableType, securable_name: str, *, add: Optional[List[WorkspaceBinding]] = None, @@ -9258,8 +9277,8 @@ def update_bindings(self, Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`UpdateBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. :param add: List[:class:`WorkspaceBinding`] (optional) @@ -9275,7 +9294,7 @@ def update_bindings(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', - f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', body=body, headers=headers) return WorkspaceBindingsResponse.from_dict(res) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 4e6a02152..bbfda7891 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -4461,11 +4461,8 @@ class Library: """Specification of a CRAN library to be installed as part of the library""" egg: Optional[str] = None - """URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes - paths, and S3 URIs. 
For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{ "egg" : - "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3 is used, - please make sure the cluster has read access on the library. You may need to launch the cluster - with an IAM role to access the S3 URI.""" + """Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is + not supported in Databricks Runtime 14.0 and above.""" jar: Optional[str] = None """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes @@ -4603,21 +4600,103 @@ def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse: return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) +@dataclass +class ListClustersFilterBy: + cluster_sources: Optional[List[ClusterSource]] = None + """The source of cluster creation.""" + + cluster_states: Optional[List[State]] = None + """The current state of the clusters.""" + + is_pinned: Optional[bool] = None + """Whether the clusters are pinned or not.""" + + policy_id: Optional[str] = None + """The ID of the cluster policy used to create the cluster if applicable.""" + + def as_dict(self) -> dict: + """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources] + if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states] + if self.is_pinned is not None: body['is_pinned'] = self.is_pinned + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy: + """Deserializes the ListClustersFilterBy from a dictionary.""" + return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource), + cluster_states=_repeated_enum(d, 'cluster_states', State), + is_pinned=d.get('is_pinned', 
None), + policy_id=d.get('policy_id', None)) + + @dataclass class ListClustersResponse: clusters: Optional[List[ClusterDetails]] = None """""" + next_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the next page of results. If the value is + "", it means no further results for the request.""" + + prev_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the previous page of results. If the + value is "", it means no further results for the request.""" + def as_dict(self) -> dict: """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse: """Deserializes the ListClustersResponse from a dictionary.""" - return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails)) + return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails), + next_page_token=d.get('next_page_token', None), + prev_page_token=d.get('prev_page_token', None)) + + +@dataclass +class ListClustersSortBy: + direction: Optional[ListClustersSortByDirection] = None + """The direction to sort by.""" + + field: Optional[ListClustersSortByField] = None + """The sorting criteria. 
By default, clusters are sorted by 3 columns from highest to lowest + precedence: cluster state, pinned or unpinned, then cluster name.""" + + def as_dict(self) -> dict: + """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.direction is not None: body['direction'] = self.direction.value + if self.field is not None: body['field'] = self.field.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy: + """Deserializes the ListClustersSortBy from a dictionary.""" + return cls(direction=_enum(d, 'direction', ListClustersSortByDirection), + field=_enum(d, 'field', ListClustersSortByField)) + + +class ListClustersSortByDirection(Enum): + """The direction to sort by.""" + + ASC = 'ASC' + DESC = 'DESC' + + +class ListClustersSortByField(Enum): + """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest + precedence: cluster state, pinned or unpinned, then cluster name.""" + + CLUSTER_NAME = 'CLUSTER_NAME' + DEFAULT = 'DEFAULT' @dataclass @@ -6174,9 +6253,8 @@ class ClustersAPI: restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters - terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep - an all-purpose cluster configuration even after it has been terminated for more than 30 days, an + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. 
To + keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.""" def __init__(self, api_client): @@ -6263,7 +6341,7 @@ def change_owner(self, cluster_id: str, owner_username: str): if owner_username is not None: body['owner_username'] = owner_username headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/change-owner', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/change-owner', body=body, headers=headers) def create(self, spark_version: str, @@ -6462,7 +6540,7 @@ def create(self, if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/create', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/create', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=CreateClusterResponse.from_dict(op_response), cluster_id=op_response['cluster_id']) @@ -6546,7 +6624,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/delete', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/delete', body=body, headers=headers) return Wait(self.wait_get_cluster_terminated, response=DeleteClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -6756,7 +6834,7 @@ def edit(self, if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/edit', body=body, headers=headers) + op_response = 
self._api.do('POST', '/api/2.1/clusters/edit', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -6867,7 +6945,7 @@ def events(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } while True: - json = self._api.do('POST', '/api/2.0/clusters/events', body=body, headers=headers) + json = self._api.do('POST', '/api/2.1/clusters/events', body=body, headers=headers) if 'events' in json: for v in json['events']: yield ClusterEvent.from_dict(v) @@ -6891,7 +6969,7 @@ def get(self, cluster_id: str) -> ClusterDetails: if cluster_id is not None: query['cluster_id'] = cluster_id headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/get', query=query, headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/get', query=query, headers=headers) return ClusterDetails.from_dict(res) def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: @@ -6928,33 +7006,46 @@ def get_permissions(self, cluster_id: str) -> ClusterPermissions: res = self._api.do('GET', f'/api/2.0/permissions/clusters/{cluster_id}', headers=headers) return ClusterPermissions.from_dict(res) - def list(self, *, can_use_client: Optional[str] = None) -> Iterator[ClusterDetails]: - """List all clusters. - - Return information about all pinned clusters, active clusters, up to 200 of the most recently - terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job - clusters in the past 30 days. - - For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in - the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1 - pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently - terminated job clusters. 
- - :param can_use_client: str (optional) - Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS. - No input for this field will get all clusters in the workspace without filtering on its supported - client + def list(self, + *, + filter_by: Optional[ListClustersFilterBy] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]: + """List clusters. + + Return information about all pinned and active clusters, and all clusters terminated within the last + 30 days. Clusters terminated prior to this period are not included. + + :param filter_by: :class:`ListClustersFilterBy` (optional) + Filters to apply to the list of clusters. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + Use next_page_token or prev_page_token returned from the previous request to list the next or + previous page of clusters respectively. + :param sort_by: :class:`ListClustersSortBy` (optional) + Sort the list of clusters by a specific criteria. 
:returns: Iterator over :class:`ClusterDetails` """ query = {} - if can_use_client is not None: query['can_use_client'] = can_use_client + if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if sort_by is not None: query['sort_by'] = sort_by.as_dict() headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/clusters/list', query=query, headers=headers) - parsed = ListClustersResponse.from_dict(json).clusters - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/clusters/list', query=query, headers=headers) + if 'clusters' in json: + for v in json['clusters']: + yield ClusterDetails.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def list_node_types(self) -> ListNodeTypesResponse: """List node types. 
@@ -6966,7 +7057,7 @@ def list_node_types(self) -> ListNodeTypesResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-node-types', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/list-node-types', headers=headers) return ListNodeTypesResponse.from_dict(res) def list_zones(self) -> ListAvailableZonesResponse: @@ -6980,7 +7071,7 @@ def list_zones(self) -> ListAvailableZonesResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-zones', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/list-zones', headers=headers) return ListAvailableZonesResponse.from_dict(res) def permanent_delete(self, cluster_id: str): @@ -7001,7 +7092,7 @@ def permanent_delete(self, cluster_id: str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/permanent-delete', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/permanent-delete', body=body, headers=headers) def pin(self, cluster_id: str): """Pin cluster. 
@@ -7018,7 +7109,7 @@ def pin(self, cluster_id: str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/pin', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/pin', body=body, headers=headers) def resize(self, cluster_id: str, @@ -7055,7 +7146,7 @@ def resize(self, if num_workers is not None: body['num_workers'] = num_workers headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/resize', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/resize', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7089,7 +7180,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai if restart_user is not None: body['restart_user'] = restart_user headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/restart', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/restart', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7134,7 +7225,7 @@ def spark_versions(self) -> GetSparkVersionsResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/spark-versions', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/spark-versions', headers=headers) return GetSparkVersionsResponse.from_dict(res) def start(self, cluster_id: str) -> Wait[ClusterDetails]: @@ -7158,7 +7249,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 
'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/start', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/start', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7182,7 +7273,7 @@ def unpin(self, cluster_id: str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/unpin', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers) def update_permissions( self, @@ -7209,7 +7300,8 @@ def update_permissions( class CommandExecutionAPI: - """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" + """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API + only supports (classic) all-purpose clusters. 
Serverless compute is not supported.""" def __init__(self, api_client): self._api = api_client diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index b24d03183..bf571dd49 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -3,14 +3,20 @@ from __future__ import annotations import logging +import random +import time from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional +from typing import Callable, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger('databricks.sdk') +from databricks.sdk.service import sql + # all definitions in this file are in alphabetical order @@ -233,6 +239,242 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse: return cls() +@dataclass +class GenieAttachment: + """Genie AI Response""" + + query: Optional[QueryAttachment] = None + + text: Optional[TextAttachment] = None + + def as_dict(self) -> dict: + """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.query: body['query'] = self.query.as_dict() + if self.text: body['text'] = self.text.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieAttachment: + """Deserializes the GenieAttachment from a dictionary.""" + return cls(query=_from_dict(d, 'query', QueryAttachment), text=_from_dict(d, 'text', TextAttachment)) + + +@dataclass +class GenieConversation: + id: str + """Conversation ID""" + + space_id: str + """Genie space ID""" + + user_id: int + """ID of the user who created the conversation""" + + title: str + """Conversation title""" + + created_timestamp: Optional[int] = None + """Timestamp when the message was created""" + + 
last_updated_timestamp: Optional[int] = None + """Timestamp when the message was last updated""" + + def as_dict(self) -> dict: + """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title + if self.user_id is not None: body['user_id'] = self.user_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieConversation: + """Deserializes the GenieConversation from a dictionary.""" + return cls(created_timestamp=d.get('created_timestamp', None), + id=d.get('id', None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + space_id=d.get('space_id', None), + title=d.get('title', None), + user_id=d.get('user_id', None)) + + +@dataclass +class GenieCreateConversationMessageRequest: + content: str + """User message content.""" + + conversation_id: Optional[str] = None + """The ID associated with the conversation.""" + + space_id: Optional[str] = None + """The ID associated with the Genie space where the conversation is started.""" + + def as_dict(self) -> dict: + """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.space_id is not None: body['space_id'] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest: + """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" + return 
cls(content=d.get('content', None),
+                   conversation_id=d.get('conversation_id', None),
+                   space_id=d.get('space_id', None))
+
+
+@dataclass
+class GenieGetMessageQueryResultResponse:
+    statement_response: Optional[sql.StatementResponse] = None
+    """SQL Statement Execution response. See [Get status, manifest, and result first
+    chunk](:method:statementexecution/getstatement) for more details."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.statement_response: body['statement_response'] = self.statement_response.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse:
+        """Deserializes the GenieGetMessageQueryResultResponse from a dictionary."""
+        return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse))
+
+
+@dataclass
+class GenieMessage:
+    id: str
+    """Message ID"""
+
+    space_id: str
+    """Genie space ID"""
+
+    conversation_id: str
+    """Conversation ID"""
+
+    content: str
+    """User message content"""
+
+    attachments: Optional[List[GenieAttachment]] = None
+    """AI produced response to the message"""
+
+    created_timestamp: Optional[int] = None
+    """Timestamp when the message was created"""
+
+    error: Optional[MessageError] = None
+    """Error message if AI failed to respond to the message"""
+
+    last_updated_timestamp: Optional[int] = None
+    """Timestamp when the message was last updated"""
+
+    query_result: Optional[Result] = None
+    """The result of SQL query if the message has a query attachment"""
+
+    status: Optional[MessageStatus] = None
+    """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
+    sources. * `ASKING_AI`: Waiting for the LLM to respond to the user's question. *
+    `EXECUTING_QUERY`: Executing AI provided SQL query.
Get the SQL query result by calling
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a
+    response or executing the query failed. Please see `error` field. * `COMPLETED`: Message
+    processing is completed. Results are in the `attachments` field. Get the SQL query result by
+    calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
+    has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user
+    needs to execute the query again. * `CANCELLED`: Message has been cancelled."""
+
+    user_id: Optional[int] = None
+    """ID of the user who created the message"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments]
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.error: body['error'] = self.error.as_dict()
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query_result: body['query_result'] = self.query_result.as_dict()
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.status is not None: body['status'] = self.status.value
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenieMessage:
+        """Deserializes the GenieMessage from a dictionary."""
+        return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment),
+                   content=d.get('content', None),
+                   conversation_id=d.get('conversation_id', None),
+                   created_timestamp=d.get('created_timestamp', None),
+                   error=_from_dict(d,
'error', MessageError), + id=d.get('id', None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + query_result=_from_dict(d, 'query_result', Result), + space_id=d.get('space_id', None), + status=_enum(d, 'status', MessageStatus), + user_id=d.get('user_id', None)) + + +@dataclass +class GenieStartConversationMessageRequest: + content: str + """The text of the message that starts the conversation.""" + + space_id: Optional[str] = None + """The ID associated with the Genie space where you want to start a conversation.""" + + def as_dict(self) -> dict: + """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.space_id is not None: body['space_id'] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest: + """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" + return cls(content=d.get('content', None), space_id=d.get('space_id', None)) + + +@dataclass +class GenieStartConversationResponse: + message_id: str + """Message ID""" + + conversation_id: str + """Conversation ID""" + + conversation: Optional[GenieConversation] = None + + message: Optional[GenieMessage] = None + + def as_dict(self) -> dict: + """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.conversation: body['conversation'] = self.conversation.as_dict() + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.message: body['message'] = self.message.as_dict() + if self.message_id is not None: body['message_id'] = self.message_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse: + """Deserializes the GenieStartConversationResponse from a dictionary.""" + return 
cls(conversation=_from_dict(d, 'conversation', GenieConversation), + conversation_id=d.get('conversation_id', None), + message=_from_dict(d, 'message', GenieMessage), + message_id=d.get('message_id', None)) + + class LifecycleState(Enum): ACTIVE = 'ACTIVE' @@ -305,6 +547,88 @@ def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse: subscriptions=_repeated_dict(d, 'subscriptions', Subscription)) +@dataclass +class MessageError: + error: Optional[str] = None + + type: Optional[MessageErrorType] = None + + def as_dict(self) -> dict: + """Serializes the MessageError into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.error is not None: body['error'] = self.error + if self.type is not None: body['type'] = self.type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MessageError: + """Deserializes the MessageError from a dictionary.""" + return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType)) + + +class MessageErrorType(Enum): + + BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION' + CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION' + CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION' + CHAT_COMPLETION_NETWORK_EXCEPTION = 'CHAT_COMPLETION_NETWORK_EXCEPTION' + CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION' + CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION' + COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION' + DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION' + FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION' + FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION' + GENERIC_CHAT_COMPLETION_EXCEPTION = 
'GENERIC_CHAT_COMPLETION_EXCEPTION'
+    GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION'
+    GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION'
+    ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION'
+    INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION'
+    INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION'
+    INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION'
+    INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION'
+    INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION'
+    INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION'
+    LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
+    MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
+    MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
+    RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
+    RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION'
+    REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
+    RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
+    SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
+    TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
+    TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
+    TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
+    UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION'
+    UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL'
+    WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION'
+    WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION'
+
+
+class MessageStatus(Enum):
+    """MessageStatus.
The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
+    sources. * `ASKING_AI`: Waiting for the LLM to respond to the user's question. *
+    `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a
+    response or executing the query failed. Please see `error` field. * `COMPLETED`: Message
+    processing is completed. Results are in the `attachments` field. Get the SQL query result by
+    calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
+    has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user
+    needs to execute the query again. * `CANCELLED`: Message has been cancelled."""
+
+    ASKING_AI = 'ASKING_AI'
+    CANCELLED = 'CANCELLED'
+    COMPLETED = 'COMPLETED'
+    EXECUTING_QUERY = 'EXECUTING_QUERY'
+    FAILED = 'FAILED'
+    FETCHING_METADATA = 'FETCHING_METADATA'
+    QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
+    SUBMITTED = 'SUBMITTED'
+
+
 @dataclass
 class MigrateDashboardRequest:
     source_dashboard_id: str
@@ -392,6 +716,72 @@ def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
                    warehouse_id=d.get('warehouse_id', None))
 
 
+@dataclass
+class QueryAttachment:
+    description: Optional[str] = None
+    """Description of the query"""
+
+    instruction_id: Optional[str] = None
+    """If the query was created on an instruction (trusted asset) we link to the id"""
+
+    instruction_title: Optional[str] = None
+    """Always store the title next to the id in case the original instruction title changes or the
+    instruction is deleted."""
+
+    last_updated_timestamp: Optional[int] = None
+    """Time when the user updated the query last"""
+
+    query: Optional[str] = None
+    """AI generated SQL query"""
+
+    title: Optional[str] = None
+    """Name of the query"""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request
body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.instruction_id is not None: body['instruction_id'] = self.instruction_id + if self.instruction_title is not None: body['instruction_title'] = self.instruction_title + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query is not None: body['query'] = self.query + if self.title is not None: body['title'] = self.title + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryAttachment: + """Deserializes the QueryAttachment from a dictionary.""" + return cls(description=d.get('description', None), + instruction_id=d.get('instruction_id', None), + instruction_title=d.get('instruction_title', None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + query=d.get('query', None), + title=d.get('title', None)) + + +@dataclass +class Result: + row_count: Optional[int] = None + """Row count of the result""" + + statement_id: Optional[str] = None + """Statement Execution API statement id. 
Use [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) to get the full result data.""" + + def as_dict(self) -> dict: + """Serializes the Result into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.row_count is not None: body['row_count'] = self.row_count + if self.statement_id is not None: body['statement_id'] = self.statement_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Result: + """Deserializes the Result from a dictionary.""" + return cls(row_count=d.get('row_count', None), statement_id=d.get('statement_id', None)) + + @dataclass class Schedule: cron_schedule: CronSchedule @@ -565,6 +955,23 @@ def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser: return cls(user_id=d.get('user_id', None)) +@dataclass +class TextAttachment: + content: Optional[str] = None + """AI generated message""" + + def as_dict(self) -> dict: + """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TextAttachment: + """Deserializes the TextAttachment from a dictionary.""" + return cls(content=d.get('content', None)) + + @dataclass class TrashDashboardResponse: @@ -675,6 +1082,193 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest: schedule_id=d.get('schedule_id', None)) +class GenieAPI: + """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that + business users can use to ask questions using natural language. Genie uses data registered to Unity + Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. 
Also, Databricks + Assistant must be enabled.""" + + def __init__(self, api_client): + self._api = api_client + + def wait_get_message_genie_completed( + self, + conversation_id: str, + message_id: str, + space_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: + deadline = time.time() + timeout.total_seconds() + target_states = (MessageStatus.COMPLETED, ) + failure_states = (MessageStatus.FAILED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach COMPLETED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: + """Create conversation message. + + Create new message in [conversation](:method:genie/startconversation). The AI response uses all + previously created messages in the conversation to respond. + + :param space_id: str + The ID associated with the Genie space where the conversation is started. + :param conversation_id: str + The ID associated with the conversation. + :param content: str + User message content. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. 
+ """ + body = {} + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieMessage.from_dict(op_response), + conversation_id=conversation_id, + message_id=op_response['id'], + space_id=space_id) + + def create_message_and_wait(self, + space_id: str, + conversation_id: str, + content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.create_message(content=content, conversation_id=conversation_id, + space_id=space_id).result(timeout=timeout) + + def execute_message_query(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Execute SQL query in a conversation message. + + Execute the SQL query in the message. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: + """Get conversation message. + + Get message from conversation. + + :param space_id: str + The ID associated with the Genie space where the target conversation is located. + :param conversation_id: str + The ID associated with the target conversation. + :param message_id: str + The ID associated with the target message from the identified conversation. 
+ + :returns: :class:`GenieMessage` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}', + headers=headers) + return GenieMessage.from_dict(res) + + def get_message_query_result(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Get conversation message SQL query result. + + Get the result of SQL query if the message has a query attachment. This is only available if a message + has a query attachment and the message status is `EXECUTING_QUERY`. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: + """Start conversation. + + Start a new conversation. + + :param space_id: str + The ID associated with the Genie space where you want to start a conversation. + :param content: str + The text of the message that starts the conversation. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. 
+ """ + body = {} + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', + f'/api/2.0/genie/spaces/{space_id}/start-conversation', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieStartConversationResponse.from_dict(op_response), + conversation_id=op_response['conversation_id'], + message_id=op_response['message_id'], + space_id=space_id) + + def start_conversation_and_wait(self, space_id: str, content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) + + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete).""" diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 27f448ccb..b5cf91846 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -132,16 +132,16 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: @dataclass -class DeleteWorkspaceAssignments: +class DeleteWorkspacePermissionAssignmentResponse: def as_dict(self) -> dict: - """Serializes the DeleteWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteWorkspacePermissionAssignmentResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspaceAssignments: - """Deserializes the DeleteWorkspaceAssignments from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse: + """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary.""" return cls() @@ -406,6 +406,56 @@ def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse: 
total_results=d.get('totalResults', None)) +@dataclass +class MigratePermissionsRequest: + workspace_id: int + """WorkspaceId of the associated workspace where the permission migration will occur.""" + + from_workspace_group_name: str + """The name of the workspace group that permissions will be migrated from.""" + + to_account_group_name: str + """The name of the account group that permissions will be migrated to.""" + + size: Optional[int] = None + """The maximum number of permissions that will be migrated.""" + + def as_dict(self) -> dict: + """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.from_workspace_group_name is not None: + body['from_workspace_group_name'] = self.from_workspace_group_name + if self.size is not None: body['size'] = self.size + if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest: + """Deserializes the MigratePermissionsRequest from a dictionary.""" + return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), + size=d.get('size', None), + to_account_group_name=d.get('to_account_group_name', None), + workspace_id=d.get('workspace_id', None)) + + +@dataclass +class MigratePermissionsResponse: + permissions_migrated: Optional[int] = None + """Number of permissions migrated.""" + + def as_dict(self) -> dict: + """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse: + """Deserializes the MigratePermissionsResponse from a dictionary.""" + return 
cls(permissions_migrated=d.get('permissions_migrated', None)) + + @dataclass class Name: family_name: Optional[str] = None @@ -723,6 +773,9 @@ def from_dict(cls, d: Dict[str, any]) -> Permission: @dataclass class PermissionAssignment: + """The output format for existing workspace PermissionAssignment records, which contains some info + for user consumption.""" + error: Optional[str] = None """Error response associated with a workspace permission assignment, if any.""" @@ -787,57 +840,6 @@ class PermissionLevel(Enum): IS_OWNER = 'IS_OWNER' -@dataclass -class PermissionMigrationRequest: - workspace_id: int - """WorkspaceId of the associated workspace where the permission migration will occur. Both - workspace group and account group must be in this workspace.""" - - from_workspace_group_name: str - """The name of the workspace group that permissions will be migrated from.""" - - to_account_group_name: str - """The name of the account group that permissions will be migrated to.""" - - size: Optional[int] = None - """The maximum number of permissions that will be migrated.""" - - def as_dict(self) -> dict: - """Serializes the PermissionMigrationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.from_workspace_group_name is not None: - body['from_workspace_group_name'] = self.from_workspace_group_name - if self.size is not None: body['size'] = self.size - if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationRequest: - """Deserializes the PermissionMigrationRequest from a dictionary.""" - return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), - size=d.get('size', None), - to_account_group_name=d.get('to_account_group_name', None), - workspace_id=d.get('workspace_id', None)) - - -@dataclass 
-class PermissionMigrationResponse: - permissions_migrated: Optional[int] = None - """Number of permissions migrated.""" - - def as_dict(self) -> dict: - """Serializes the PermissionMigrationResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationResponse: - """Deserializes the PermissionMigrationResponse from a dictionary.""" - return cls(permissions_migrated=d.get('permissions_migrated', None)) - - @dataclass class PermissionOutput: description: Optional[str] = None @@ -911,6 +913,8 @@ def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest: @dataclass class PrincipalOutput: + """Information about the principal assigned to the workspace.""" + display_name: Optional[str] = None """The display name of the principal.""" @@ -1134,7 +1138,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest: @dataclass class UpdateWorkspaceAssignments: - permissions: List[WorkspacePermission] + permissions: Optional[List[WorkspacePermission]] = None """Array of permissions assignments to update on the workspace. 
Note that excluding this field will have the same effect as providing an empty list which will result in the deletion of all permissions for the principal.""" @@ -1143,7 +1147,7 @@ class UpdateWorkspaceAssignments: """The ID of the user, service principal, or group.""" workspace_id: Optional[int] = None - """The workspace ID.""" + """The workspace ID for the account.""" def as_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" @@ -2495,7 +2499,7 @@ def update(self, class PermissionMigrationAPI: - """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.""" + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" def __init__(self, api_client): self._api = api_client @@ -2505,14 +2509,11 @@ def migrate_permissions(self, from_workspace_group_name: str, to_account_group_name: str, *, - size: Optional[int] = None) -> PermissionMigrationResponse: + size: Optional[int] = None) -> MigratePermissionsResponse: """Migrate Permissions. - Migrate a batch of permissions from a workspace local group to an account group. - :param workspace_id: int - WorkspaceId of the associated workspace where the permission migration will occur. Both workspace - group and account group must be in this workspace. + WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str The name of the workspace group that permissions will be migrated from. :param to_account_group_name: str @@ -2520,7 +2521,7 @@ def migrate_permissions(self, :param size: int (optional) The maximum number of permissions that will be migrated. 
- :returns: :class:`PermissionMigrationResponse` + :returns: :class:`MigratePermissionsResponse` """ body = {} if from_workspace_group_name is not None: @@ -2531,7 +2532,7 @@ def migrate_permissions(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/permissionmigration', body=body, headers=headers) - return PermissionMigrationResponse.from_dict(res) + return MigratePermissionsResponse.from_dict(res) class PermissionsAPI: @@ -3313,7 +3314,7 @@ def delete(self, workspace_id: int, principal_id: int): principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. @@ -3366,18 +3367,21 @@ def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: parsed = PermissionAssignments.from_dict(json).permission_assignments return parsed if parsed is not None else [] - def update(self, workspace_id: int, principal_id: int, - permissions: List[WorkspacePermission]) -> PermissionAssignment: + def update(self, + workspace_id: int, + principal_id: int, + *, + permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment: """Create or update permissions assignment. Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - :param permissions: List[:class:`WorkspacePermission`] + :param permissions: List[:class:`WorkspacePermission`] (optional) Array of permissions assignments to update on the workspace. Note that excluding this field will have the same effect as providing an empty list which will result in the deletion of all permissions for the principal. 
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index f96d7dd75..cf677fd06 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -480,7 +480,7 @@ class CreateJob: """Deployment information for jobs managed by external sources.""" description: Optional[str] = None - """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.""" + """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" edit_mode: Optional[JobEditMode] = None """Edit mode of the job. @@ -1601,7 +1601,7 @@ class JobSettings: """Deployment information for jobs managed by external sources.""" description: Optional[str] = None - """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.""" + """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" edit_mode: Optional[JobEditMode] = None """Edit mode of the job. @@ -2055,7 +2055,6 @@ class PeriodicTriggerConfigurationTimeUnit(Enum): DAYS = 'DAYS' HOURS = 'HOURS' - TIME_UNIT_UNSPECIFIED = 'TIME_UNIT_UNSPECIFIED' WEEKS = 'WEEKS' @@ -5192,7 +5191,7 @@ def create(self, :param deployment: :class:`JobDeployment` (optional) Deployment information for jobs managed by external sources. :param description: str (optional) - An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. 
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 57cd4f38f..1a2dedf31 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -56,7 +56,6 @@ class AssetType(Enum): ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA' ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL' ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK' - ASSET_TYPE_UNSPECIFIED = 'ASSET_TYPE_UNSPECIFIED' @dataclass @@ -804,11 +803,6 @@ class FileStatus(Enum): FILE_STATUS_STAGING = 'FILE_STATUS_STAGING' -class FilterType(Enum): - - METASTORE = 'METASTORE' - - class FulfillmentType(Enum): INSTALL = 'INSTALL' @@ -1297,16 +1291,11 @@ class Listing: id: Optional[str] = None - provider_summary: Optional[ProviderListingSummaryInfo] = None - """we can not use just ProviderListingSummary since we already have same name on entity side of the - state""" - def as_dict(self) -> dict: """Serializes the Listing into a dictionary suitable for use as a JSON request body.""" body = {} if self.detail: body['detail'] = self.detail.as_dict() if self.id is not None: body['id'] = self.id - if self.provider_summary: body['provider_summary'] = self.provider_summary.as_dict() if self.summary: body['summary'] = self.summary.as_dict() return body @@ -1315,7 +1304,6 @@ def from_dict(cls, d: Dict[str, any]) -> Listing: """Deserializes the Listing from a dictionary.""" return cls(detail=_from_dict(d, 'detail', ListingDetail), id=d.get('id', None), - provider_summary=_from_dict(d, 'provider_summary', ProviderListingSummaryInfo), summary=_from_dict(d, 'summary', ListingSummary)) @@ -1461,23 +1449,18 @@ def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment: @dataclass class ListingSetting: - filters: Optional[List[VisibilityFilter]] = None - """filters are joined with `or` conjunction.""" - visibility: Optional[Visibility] = None def as_dict(self) -> dict: """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if 
self.filters: body['filters'] = [v.as_dict() for v in self.filters] if self.visibility is not None: body['visibility'] = self.visibility.value return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingSetting: """Deserializes the ListingSetting from a dictionary.""" - return cls(filters=_repeated_dict(d, 'filters', VisibilityFilter), - visibility=_enum(d, 'visibility', Visibility)) + return cls(visibility=_enum(d, 'visibility', Visibility)) class ListingShareType(Enum): @@ -1517,8 +1500,6 @@ class ListingSummary: """if a git repo is being created, a listing will be initialized with this field as opposed to a share""" - metastore_id: Optional[str] = None - provider_id: Optional[str] = None provider_region: Optional[RegionInfo] = None @@ -1552,7 +1533,6 @@ def as_dict(self) -> dict: if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids] if self.git_repo: body['git_repo'] = self.git_repo.as_dict() if self.listing_type is not None: body['listingType'] = self.listing_type.value - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name if self.provider_id is not None: body['provider_id'] = self.provider_id if self.provider_region: body['provider_region'] = self.provider_region.as_dict() @@ -1577,7 +1557,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListingSummary: exchange_ids=d.get('exchange_ids', None), git_repo=_from_dict(d, 'git_repo', RepoInfo), listing_type=_enum(d, 'listingType', ListingType), - metastore_id=d.get('metastore_id', None), name=d.get('name', None), provider_id=d.get('provider_id', None), provider_region=_from_dict(d, 'provider_region', RegionInfo), @@ -1617,7 +1596,6 @@ class ListingTagType(Enum): LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE' LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK' - LISTING_TAG_TYPE_UNSPECIFIED = 'LISTING_TAG_TYPE_UNSPECIFIED' class ListingType(Enum): @@ -1733,37 +1711,6 @@ def from_dict(cls, d: Dict[str, any]) 
-> ProviderAnalyticsDashboard: return cls(id=d.get('id', None)) -@dataclass -class ProviderIconFile: - icon_file_id: Optional[str] = None - - icon_file_path: Optional[str] = None - - icon_type: Optional[ProviderIconType] = None - - def as_dict(self) -> dict: - """Serializes the ProviderIconFile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id - if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path - if self.icon_type is not None: body['icon_type'] = self.icon_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ProviderIconFile: - """Deserializes the ProviderIconFile from a dictionary.""" - return cls(icon_file_id=d.get('icon_file_id', None), - icon_file_path=d.get('icon_file_path', None), - icon_type=_enum(d, 'icon_type', ProviderIconType)) - - -class ProviderIconType(Enum): - - DARK = 'DARK' - PRIMARY = 'PRIMARY' - PROVIDER_ICON_TYPE_UNSPECIFIED = 'PROVIDER_ICON_TYPE_UNSPECIFIED' - - @dataclass class ProviderInfo: name: str @@ -1837,33 +1784,6 @@ def from_dict(cls, d: Dict[str, any]) -> ProviderInfo: term_of_service_link=d.get('term_of_service_link', None)) -@dataclass -class ProviderListingSummaryInfo: - """we can not use just ProviderListingSummary since we already have same name on entity side of the - state""" - - description: Optional[str] = None - - icon_files: Optional[List[ProviderIconFile]] = None - - name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the ProviderListingSummaryInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.icon_files: body['icon_files'] = [v.as_dict() for v in self.icon_files] - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ProviderListingSummaryInfo: - 
"""Deserializes the ProviderListingSummaryInfo from a dictionary.""" - return cls(description=d.get('description', None), - icon_files=_repeated_dict(d, 'icon_files', ProviderIconFile), - name=d.get('name', None)) - - @dataclass class RegionInfo: cloud: Optional[str] = None @@ -1996,14 +1916,6 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject: return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None)) -class SortBy(Enum): - - SORT_BY_DATE = 'SORT_BY_DATE' - SORT_BY_RELEVANCE = 'SORT_BY_RELEVANCE' - SORT_BY_TITLE = 'SORT_BY_TITLE' - SORT_BY_UNSPECIFIED = 'SORT_BY_UNSPECIFIED' - - @dataclass class TokenDetail: bearer_token: Optional[str] = None @@ -2369,25 +2281,6 @@ class Visibility(Enum): PUBLIC = 'PUBLIC' -@dataclass -class VisibilityFilter: - filter_type: Optional[FilterType] = None - - filter_value: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the VisibilityFilter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter_type is not None: body['filterType'] = self.filter_type.value - if self.filter_value is not None: body['filterValue'] = self.filter_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> VisibilityFilter: - """Deserializes the VisibilityFilter from a dictionary.""" - return cls(filter_type=_enum(d, 'filterType', FilterType), filter_value=d.get('filterValue', None)) - - class ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" @@ -2667,14 +2560,12 @@ def list(self, *, assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, - is_ascending: Optional[bool] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, is_staff_pick: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None, - sort_by: Optional[SortBy] = None, tags: Optional[List[ListingTag]] 
= None) -> Iterator[Listing]: """List listings. @@ -2684,7 +2575,6 @@ def list(self, Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) Filters each listing based on if it is free. :param is_private_exchange: bool (optional) @@ -2695,8 +2585,6 @@ def list(self, :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) - Criteria for sorting the resulting set of listings. :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags @@ -2706,14 +2594,12 @@ def list(self, query = {} if assets is not None: query['assets'] = [v.value for v in assets] if categories is not None: query['categories'] = [v.value for v in categories] - if is_ascending is not None: query['is_ascending'] = is_ascending if is_free is not None: query['is_free'] = is_free if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick if page_size is not None: query['page_size'] = page_size if page_token is not None: query['page_token'] = page_token if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] - if sort_by is not None: query['sort_by'] = sort_by.value if tags is not None: query['tags'] = [v.as_dict() for v in tags] headers = {'Accept': 'application/json', } @@ -2731,13 +2617,11 @@ def search(self, *, assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, - is_ascending: Optional[bool] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, - provider_ids: Optional[List[str]] = None, - sort_by: Optional[SortBy] = None) -> Iterator[Listing]: + provider_ids: 
Optional[List[str]] = None) -> Iterator[Listing]: """Search listings. Search published listings in the Databricks Marketplace that the consumer has access to. This query @@ -2749,14 +2633,12 @@ def search(self, Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) :param is_private_exchange: bool (optional) :param page_size: int (optional) :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) :returns: Iterator over :class:`Listing` """ @@ -2764,14 +2646,12 @@ def search(self, query = {} if assets is not None: query['assets'] = [v.value for v in assets] if categories is not None: query['categories'] = [v.value for v in categories] - if is_ascending is not None: query['is_ascending'] = is_ascending if is_free is not None: query['is_free'] = is_free if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange if page_size is not None: query['page_size'] = page_size if page_token is not None: query['page_token'] = page_token if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] if query is not None: query['query'] = query - if sort_by is not None: query['sort_by'] = sort_by.value headers = {'Accept': 'application/json', } while True: diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 44132ee88..0c439ae7e 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -15,14 +15,14 @@ @dataclass class CreateCustomAppIntegration: - name: str - """name of the custom oauth app""" + confidential: Optional[bool] = None + """This field indicates whether an OAuth client secret is required to authenticate this client.""" - redirect_urls: List[str] - """List of oauth redirect urls""" + name: Optional[str] = None 
+ """Name of the custom OAuth app""" - confidential: Optional[bool] = None - """indicates if an oauth client-secret should be generated""" + redirect_urls: Optional[List[str]] = None + """List of OAuth redirect urls""" scopes: Optional[List[str]] = None """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, @@ -54,14 +54,14 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration: @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None - """oauth client-id generated by the Databricks""" + """OAuth client-id generated by the Databricks""" client_secret: Optional[str] = None - """oauth client-secret generated by the Databricks if this is a confidential oauth app + """OAuth client-secret generated by the Databricks. If this is a confidential OAuth app client-secret will be generated.""" integration_id: Optional[str] = None - """unique integration id for the custom oauth app""" + """Unique integration id for the custom OAuth app""" def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" @@ -82,7 +82,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput: @dataclass class CreatePublishedAppIntegration: app_id: Optional[str] = None - """app_id of the oauth published app integration. For example power-bi, tableau-deskop""" + """App id of the OAuth published app integration. 
For example power-bi, tableau-deskop""" token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -104,7 +104,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration: @dataclass class CreatePublishedAppIntegrationOutput: integration_id: Optional[str] = None - """unique integration id for the published oauth app""" + """Unique integration id for the published OAuth app""" def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" @@ -227,19 +227,27 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: @dataclass class GetCustomAppIntegrationOutput: client_id: Optional[str] = None - """oauth client id of the custom oauth app""" + """The client id of the custom OAuth app""" confidential: Optional[bool] = None - """indicates if an oauth client-secret should be generated""" + """This field indicates whether an OAuth client secret is required to authenticate this client.""" + + create_time: Optional[str] = None + + created_by: Optional[int] = None + + creator_username: Optional[str] = None integration_id: Optional[str] = None """ID of this custom app""" name: Optional[str] = None - """name of the custom oauth app""" + """The display name of the custom OAuth app""" redirect_urls: Optional[List[str]] = None - """List of oauth redirect urls""" + """List of OAuth redirect urls""" + + scopes: Optional[List[str]] = None token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -249,9 +257,13 @@ def as_dict(self) -> dict: body = {} if self.client_id is not None: body['client_id'] = self.client_id if self.confidential is not None: body['confidential'] = self.confidential + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.creator_username is not None: body['creator_username'] = self.creator_username if 
self.integration_id is not None: body['integration_id'] = self.integration_id if self.name is not None: body['name'] = self.name if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body @@ -260,39 +272,51 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput: """Deserializes the GetCustomAppIntegrationOutput from a dictionary.""" return cls(client_id=d.get('client_id', None), confidential=d.get('confidential', None), + create_time=d.get('create_time', None), + created_by=d.get('created_by', None), + creator_username=d.get('creator_username', None), integration_id=d.get('integration_id', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), + scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) @dataclass class GetCustomAppIntegrationsOutput: apps: Optional[List[GetCustomAppIntegrationOutput]] = None - """Array of Custom OAuth App Integrations defined for the account.""" + """List of Custom OAuth App Integrations defined for the account.""" + + next_page_token: Optional[str] = None def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput: """Deserializes the GetCustomAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput)) + return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput), + next_page_token=d.get('next_page_token', None)) @dataclass class 
GetPublishedAppIntegrationOutput: app_id: Optional[str] = None - """app-id of the published app integration""" + """App-id of the published app integration""" + + create_time: Optional[str] = None + + created_by: Optional[int] = None integration_id: Optional[str] = None - """unique integration id for the published oauth app""" + """Unique integration id for the published OAuth app""" name: Optional[str] = None - """name of the published oauth app""" + """Display name of the published OAuth app""" token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -301,6 +325,8 @@ def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.app_id is not None: body['app_id'] = self.app_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by if self.integration_id is not None: body['integration_id'] = self.integration_id if self.name is not None: body['name'] = self.name if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() @@ -310,6 +336,8 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput: """Deserializes the GetPublishedAppIntegrationOutput from a dictionary.""" return cls(app_id=d.get('app_id', None), + create_time=d.get('create_time', None), + created_by=d.get('created_by', None), integration_id=d.get('integration_id', None), name=d.get('name', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) @@ -318,24 +346,28 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput: @dataclass class GetPublishedAppIntegrationsOutput: apps: Optional[List[GetPublishedAppIntegrationOutput]] = None - """Array of Published OAuth App Integrations defined for the account.""" + """List of Published OAuth App Integrations defined for the 
account.""" + + next_page_token: Optional[str] = None def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput: """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput)) + return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput), + next_page_token=d.get('next_page_token', None)) @dataclass class GetPublishedAppsOutput: apps: Optional[List[PublishedAppOutput]] = None - """Array of Published OAuth Apps.""" + """List of Published OAuth Apps.""" next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more @@ -388,7 +420,7 @@ class PublishedAppOutput: apps.""" name: Optional[str] = None - """Name of the published OAuth app.""" + """The display name of the published OAuth app.""" redirect_urls: Optional[List[str]] = None """Redirect URLs of the published OAuth app.""" @@ -485,13 +517,12 @@ def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy: @dataclass class UpdateCustomAppIntegration: integration_id: Optional[str] = None - """The oauth app integration ID.""" redirect_urls: Optional[List[str]] = None - """List of oauth redirect urls to be updated in the custom oauth app integration""" + """List of OAuth redirect urls to be updated in the custom OAuth app integration""" token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the custom oauth app integration""" + """Token access policy to be updated in the custom OAuth app integration""" def as_dict(self) -> dict: """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" @@ -526,10 +557,9 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegrationOutput: @dataclass class UpdatePublishedAppIntegration: integration_id: Optional[str] = None - """The oauth app integration ID.""" token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the published oauth app integration""" + """Token access policy to be updated in the published OAuth app integration""" def as_dict(self) -> dict: """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" @@ -560,31 +590,31 @@ def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput: class CustomAppIntegrationAPI: - """These APIs enable administrators to manage custom oauth app integrations, which is required for + """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App 
Integration like Tableau Cloud for Databricks in AWS cloud.""" def __init__(self, api_client): self._api = api_client def create(self, - name: str, - redirect_urls: List[str], *, confidential: Optional[bool] = None, + name: Optional[str] = None, + redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput: """Create Custom OAuth App Integration. Create Custom OAuth App Integration. - You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get. + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - :param name: str - name of the custom oauth app - :param redirect_urls: List[str] - List of oauth redirect urls :param confidential: bool (optional) - indicates if an oauth client-secret should be generated + This field indicates whether an OAuth client secret is required to authenticate this client. + :param name: str (optional) + Name of the custom OAuth app + :param redirect_urls: List[str] (optional) + List of OAuth redirect urls :param scopes: List[str] (optional) OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email. @@ -610,11 +640,10 @@ def create(self, def delete(self, integration_id: str): """Delete Custom OAuth App Integration. - Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. """ @@ -632,7 +661,6 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. 
:returns: :class:`GetCustomAppIntegrationOutput` """ @@ -645,21 +673,39 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: headers=headers) return GetCustomAppIntegrationOutput.from_dict(res) - def list(self) -> Iterator[GetCustomAppIntegrationOutput]: + def list(self, + *, + include_creator_username: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]: """Get custom oauth app integrations. - Get the list of custom oauth app integrations for the specified Databricks account + Get the list of custom OAuth app integrations for the specified Databricks account + + :param include_creator_username: bool (optional) + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetCustomAppIntegrationOutput` """ + query = {} + if include_creator_username is not None: query['include_creator_username'] = include_creator_username + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', - headers=headers) - parsed = GetCustomAppIntegrationsOutput.from_dict(json).apps - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', + query=query, + headers=headers) + if 'apps' in json: + for v in json['apps']: + yield GetCustomAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, integration_id: str, @@ -668,15 +714,14 @@ def update(self, token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Custom OAuth App Integration. - Updates an existing custom OAuth App Integration. 
You can retrieve the custom oauth app integration + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param redirect_urls: List[str] (optional) - List of oauth redirect urls to be updated in the custom oauth app integration + List of OAuth redirect urls to be updated in the custom OAuth app integration :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the custom oauth app integration + Token access policy to be updated in the custom OAuth app integration """ @@ -709,7 +754,7 @@ def list(self, Get all the available published OAuth apps in Databricks. :param page_size: int (optional) - The max number of OAuth published apps to return. + The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. @@ -723,7 +768,7 @@ def list(self, while True: json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps/', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps', query=query, headers=headers) if 'apps' in json: @@ -735,7 +780,7 @@ def list(self, class PublishedAppIntegrationAPI: - """These APIs enable administrators to manage published oauth app integrations, which is required for + """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" def __init__(self, api_client): @@ -750,10 +795,10 @@ def create( Create Published OAuth App Integration. - You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get. + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. 
:param app_id: str (optional) - app_id of the oauth published app integration. For example power-bi, tableau-deskop + App id of the OAuth published app integration. For example power-bi, tableau-deskop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy @@ -773,11 +818,10 @@ def create( def delete(self, integration_id: str): """Delete Published OAuth App Integration. - Delete an existing Published OAuth App Integration. You can retrieve the published oauth app + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. """ @@ -795,7 +839,6 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: Gets the Published OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetPublishedAppIntegrationOutput` """ @@ -808,32 +851,46 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: headers=headers) return GetPublishedAppIntegrationOutput.from_dict(res) - def list(self) -> Iterator[GetPublishedAppIntegrationOutput]: + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]: """Get published oauth app integrations. 
- Get the list of published oauth app integrations for the specified Databricks account + Get the list of published OAuth app integrations for the specified Databricks account + + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` """ + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', - headers=headers) - parsed = GetPublishedAppIntegrationsOutput.from_dict(json).apps - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', + query=query, + headers=headers) + if 'apps' in json: + for v in json['apps']: + yield GetPublishedAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Published OAuth App Integration. - Updates an existing published OAuth App Integration. You can retrieve the published oauth app + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. 
:param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the published oauth app integration + Token access policy to be updated in the published OAuth app integration """ diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 0f3d00de9..b1c43a926 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -25,19 +25,29 @@ @dataclass class Ai21LabsConfig: - ai21labs_api_key: str - """The Databricks secret key reference for an AI21Labs API key.""" + ai21labs_api_key: Optional[str] = None + """The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API + key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the + following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" + + ai21labs_api_key_plaintext: Optional[str] = None + """An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the + following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: + body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig: """Deserializes the Ai21LabsConfig from a dictionary.""" - return cls(ai21labs_api_key=d.get('ai21labs_api_key', None)) + return cls(ai21labs_api_key=d.get('ai21labs_api_key', None), + ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None)) @dataclass @@ -45,24 +55,44 @@ class AmazonBedrockConfig: aws_region: str """The AWS region to use. 
Bedrock has to be enabled there.""" - aws_access_key_id: str - """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with - Bedrock services.""" - - aws_secret_access_key: str - """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID, - with permissions to interact with Bedrock services.""" - bedrock_provider: AmazonBedrockConfigBedrockProvider """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.""" + aws_access_key_id: Optional[str] = None + """The Databricks secret key reference for an AWS access key ID with permissions to interact with + Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You + must provide an API key using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`.""" + + aws_access_key_id_plaintext: Optional[str] = None + """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext + string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. + You must provide an API key using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`.""" + + aws_secret_access_key: Optional[str] = None + """The Databricks secret key reference for an AWS secret access key paired with the access key ID, + with permissions to interact with Bedrock services. If you prefer to paste your API key + directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the + following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" + + aws_secret_access_key_plaintext: Optional[str] = None + """An AWS secret access key paired with the access key ID, with permissions to interact with + Bedrock services provided as a plaintext string. 
If you prefer to reference your key using + Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the + following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" + def as_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: + body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext if self.aws_region is not None: body['aws_region'] = self.aws_region if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: + body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value return body @@ -70,8 +100,10 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig: """Deserializes the AmazonBedrockConfig from a dictionary.""" return cls(aws_access_key_id=d.get('aws_access_key_id', None), + aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None), aws_region=d.get('aws_region', None), aws_secret_access_key=d.get('aws_secret_access_key', None), + aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None), bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider)) @@ -87,19 +119,29 @@ class AmazonBedrockConfigBedrockProvider(Enum): @dataclass class AnthropicConfig: - anthropic_api_key: str - """The Databricks secret key reference for an Anthropic API key.""" + anthropic_api_key: Optional[str] = None + """The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API + key directly, see `anthropic_api_key_plaintext`. 
You must provide an API key using one of the + following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" + + anthropic_api_key_plaintext: Optional[str] = None + """The Anthropic API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the + following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: + body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: """Deserializes the AnthropicConfig from a dictionary.""" - return cls(anthropic_api_key=d.get('anthropic_api_key', None)) + return cls(anthropic_api_key=d.get('anthropic_api_key', None), + anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) @dataclass @@ -249,7 +291,6 @@ def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts: class AppDeploymentMode(Enum): AUTO_SYNC = 'AUTO_SYNC' - MODE_UNSPECIFIED = 'MODE_UNSPECIFIED' SNAPSHOT = 'SNAPSHOT' @@ -257,7 +298,6 @@ class AppDeploymentState(Enum): FAILED = 'FAILED' IN_PROGRESS = 'IN_PROGRESS' - STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' STOPPED = 'STOPPED' SUCCEEDED = 'SUCCEEDED' @@ -308,7 +348,6 @@ class AppState(Enum): IDLE = 'IDLE' RUNNING = 'RUNNING' STARTING = 'STARTING' - STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' @dataclass @@ -467,19 +506,35 @@ class ChatMessageRole(Enum): @dataclass class CohereConfig: - cohere_api_key: str - """The Databricks secret key reference for a Cohere API key.""" + cohere_api_base: Optional[str] = None + """This is an optional field to provide a customized base URL for the Cohere API. 
If left + unspecified, the standard Cohere base URL is used.""" + + cohere_api_key: Optional[str] = None + """The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key + directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following + fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" + + cohere_api_key_plaintext: Optional[str] = None + """The Cohere API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following + fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: + body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> CohereConfig: """Deserializes the CohereConfig from a dictionary.""" - return cls(cohere_api_key=d.get('cohere_api_key', None)) + return cls(cohere_api_base=d.get('cohere_api_base', None), + cohere_api_key=d.get('cohere_api_key', None), + cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) @dataclass @@ -576,19 +631,30 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint: @dataclass class DatabricksModelServingConfig: - databricks_api_token: str - """The Databricks secret key reference for a Databricks API token that corresponds to a user or - service principal with Can Query access to the model serving endpoint pointed to by this - external model.""" - databricks_workspace_url: str """The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.""" + 
databricks_api_token: Optional[str] = None + """The Databricks secret key reference for a Databricks API token that corresponds to a user or + service principal with Can Query access to the model serving endpoint pointed to by this + external model. If you prefer to paste your API key directly, see + `databricks_api_token_plaintext`. You must provide an API key using one of the following fields: + `databricks_api_token` or `databricks_api_token_plaintext`.""" + + databricks_api_token_plaintext: Optional[str] = None + """The Databricks API token that corresponds to a user or service principal with Can Query access + to the model serving endpoint pointed to by this external model provided as a plaintext string. + If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. You + must provide an API key using one of the following fields: `databricks_api_token` or + `databricks_api_token_plaintext`.""" + def as_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: + body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url return body @@ -597,6 +663,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig: """Deserializes the DatabricksModelServingConfig from a dictionary.""" return cls(databricks_api_token=d.get('databricks_api_token', None), + databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None), databricks_workspace_url=d.get('databricks_workspace_url', None)) @@ -849,6 +916,7 @@ class EndpointStateConfigUpdate(Enum): IN_PROGRESS = 'IN_PROGRESS' NOT_UPDATING = 'NOT_UPDATING' + UPDATE_CANCELED = 
'UPDATE_CANCELED' UPDATE_FAILED = 'UPDATE_FAILED' @@ -924,8 +992,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse: class ExternalModel: provider: ExternalModelProvider """The name of the provider for the external model. Currently, the supported providers are - 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and - 'palm'.",""" + 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', + 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" name: str """The name of the external model.""" @@ -948,6 +1016,9 @@ class ExternalModel: databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None """Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.""" + google_cloud_vertex_ai_config: Optional[GoogleCloudVertexAiConfig] = None + """Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.""" + openai_config: Optional[OpenAiConfig] = None """OpenAI Config. 
Only required if the provider is 'openai'.""" @@ -963,6 +1034,8 @@ def as_dict(self) -> dict: if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict() if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict() + if self.google_cloud_vertex_ai_config: + body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict() if self.name is not None: body['name'] = self.name if self.openai_config: body['openai_config'] = self.openai_config.as_dict() if self.palm_config: body['palm_config'] = self.palm_config.as_dict() @@ -979,6 +1052,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel: cohere_config=_from_dict(d, 'cohere_config', CohereConfig), databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config', DatabricksModelServingConfig), + google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config', + GoogleCloudVertexAiConfig), name=d.get('name', None), openai_config=_from_dict(d, 'openai_config', OpenAiConfig), palm_config=_from_dict(d, 'palm_config', PaLmConfig), @@ -988,14 +1063,15 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel: class ExternalModelProvider(Enum): """The name of the provider for the external model. 
Currently, the supported providers are - 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and - 'palm'.",""" + 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', + 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" AI21LABS = 'ai21labs' AMAZON_BEDROCK = 'amazon-bedrock' ANTHROPIC = 'anthropic' COHERE = 'cohere' DATABRICKS_MODEL_SERVING = 'databricks-model-serving' + GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai' OPENAI = 'openai' PALM = 'palm' @@ -1093,6 +1169,51 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo permission_levels=_repeated_dict(d, 'permission_levels', ServingEndpointPermissionsDescription)) +@dataclass +class GoogleCloudVertexAiConfig: + private_key: Optional[str] = None + """The Databricks secret key reference for a private key for the service account which has access + to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys]. + If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an + API key using one of the following fields: `private_key` or `private_key_plaintext` + + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" + + private_key_plaintext: Optional[str] = None + """The private key for the service account which has access to the Google Cloud Vertex AI Service + provided as a plaintext secret. See [Best practices for managing service account keys]. If you + prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an + API key using one of the following fields: `private_key` or `private_key_plaintext`. 
+ + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" + + project_id: Optional[str] = None + """This is the Google Cloud project id that the service account is associated with.""" + + region: Optional[str] = None + """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more + details. Some models are only available in specific regions. + + [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" + + def as_dict(self) -> dict: + """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext + if self.project_id is not None: body['project_id'] = self.project_id + if self.region is not None: body['region'] = self.region + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: + """Deserializes the GoogleCloudVertexAiConfig from a dictionary.""" + return cls(private_key=d.get('private_key', None), + private_key_plaintext=d.get('private_key_plaintext', None), + project_id=d.get('project_id', None), + region=d.get('region', None)) + + @dataclass class ListAppDeploymentsResponse: app_deployments: Optional[List[AppDeployment]] = None @@ -1175,19 +1296,35 @@ class OpenAiConfig: """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.""" microsoft_entra_client_secret: Optional[str] = None - """The Databricks secret key reference for the Microsoft Entra Client Secret that is only required - for Azure AD OpenAI.""" + """The Databricks secret key reference for a client secret used for Microsoft Entra ID + authentication. If you prefer to paste your client secret directly, see + `microsoft_entra_client_secret_plaintext`. 
You must provide an API key using one of the + following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" + + microsoft_entra_client_secret_plaintext: Optional[str] = None + """The client secret used for Microsoft Entra ID authentication provided as a plaintext string. If + you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`. + You must provide an API key using one of the following fields: `microsoft_entra_client_secret` + or `microsoft_entra_client_secret_plaintext`.""" microsoft_entra_tenant_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.""" openai_api_base: Optional[str] = None - """This is the base URL for the OpenAI API (default: "https://api.openai.com/v1"). For Azure - OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by - Azure.""" + """This is a field to provide a customized base URl for the OpenAI API. For Azure OpenAI, this + field is required, and is the base URL for the Azure OpenAI API service provided by Azure. For + other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI + base URL is used.""" openai_api_key: Optional[str] = None - """The Databricks secret key reference for an OpenAI or Azure OpenAI API key.""" + """The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If + you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an + API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" + + openai_api_key_plaintext: Optional[str] = None + """The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you + prefer to reference your key using Databricks Secrets, see `openai_api_key`. 
You must provide an + API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" openai_api_type: Optional[str] = None """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field @@ -1213,10 +1350,14 @@ def as_dict(self) -> dict: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: + body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key + if self.openai_api_key_plaintext is not None: + body['openai_api_key_plaintext'] = self.openai_api_key_plaintext if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version if self.openai_deployment_name is not None: @@ -1229,9 +1370,12 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig: """Deserializes the OpenAiConfig from a dictionary.""" return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None), microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None), + microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext', + None), microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None), openai_api_base=d.get('openai_api_base', None), openai_api_key=d.get('openai_api_key', None), + openai_api_key_plaintext=d.get('openai_api_key_plaintext', None), openai_api_type=d.get('openai_api_type', None), openai_api_version=d.get('openai_api_version', None), 
openai_deployment_name=d.get('openai_deployment_name', None), @@ -1240,19 +1384,29 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig: @dataclass class PaLmConfig: - palm_api_key: str - """The Databricks secret key reference for a PaLM API key.""" + palm_api_key: Optional[str] = None + """The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key + directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following + fields: `palm_api_key` or `palm_api_key_plaintext`.""" + + palm_api_key_plaintext: Optional[str] = None + """The PaLM API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following + fields: `palm_api_key` or `palm_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key + if self.palm_api_key_plaintext is not None: + body['palm_api_key_plaintext'] = self.palm_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> PaLmConfig: """Deserializes the PaLmConfig from a dictionary.""" - return cls(palm_api_key=d.get('palm_api_key', None)) + return cls(palm_api_key=d.get('palm_api_key', None), + palm_api_key_plaintext=d.get('palm_api_key_plaintext', None)) @dataclass @@ -1584,11 +1738,10 @@ class ServedEntityInput: external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with - the latter set being used for custom model serving for a Databricks registered model. When an - external_model is present, the served entities list can only have one served_entity object. 
For - an existing endpoint with external_model, it can not be updated to an endpoint without + the latter set being used for custom model serving for a Databricks registered model. For an + existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add - external_model later.""" + external_model later. The task type of all external models within an endpoint must be the same.""" instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" @@ -2858,7 +3011,8 @@ def wait_get_serving_endpoint_not_updating( callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed: deadline = time.time() + timeout.total_seconds() target_states = (EndpointStateConfigUpdate.NOT_UPDATING, ) - failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, ) + failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED, + ) status_message = 'polling...' 
attempt = 1 while time.time() < deadline: diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index b02323848..d5593a1e1 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -147,7 +147,6 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWin class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): - DAY_OF_WEEK_UNSPECIFIED = 'DAY_OF_WEEK_UNSPECIFIED' FRIDAY = 'FRIDAY' MONDAY = 'MONDAY' SATURDAY = 'SATURDAY' @@ -192,7 +191,6 @@ class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH' SECOND_OF_MONTH = 'SECOND_OF_MONTH' THIRD_OF_MONTH = 'THIRD_OF_MONTH' - WEEK_DAY_FREQUENCY_UNSPECIFIED = 'WEEK_DAY_FREQUENCY_UNSPECIFIED' @dataclass @@ -281,7 +279,7 @@ def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting: class ComplianceStandard(Enum): """Compliance stardard for SHIELD customers""" - COMPLIANCE_STANDARD_UNSPECIFIED = 'COMPLIANCE_STANDARD_UNSPECIFIED' + CANADA_PROTECTED_B = 'CANADA_PROTECTED_B' CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS' FEDRAMP_HIGH = 'FEDRAMP_HIGH' FEDRAMP_IL5 = 'FEDRAMP_IL5' @@ -293,6 +291,38 @@ class ComplianceStandard(Enum): PCI_DSS = 'PCI_DSS' +@dataclass +class Config: + email: Optional[EmailConfig] = None + + generic_webhook: Optional[GenericWebhookConfig] = None + + microsoft_teams: Optional[MicrosoftTeamsConfig] = None + + pagerduty: Optional[PagerdutyConfig] = None + + slack: Optional[SlackConfig] = None + + def as_dict(self) -> dict: + """Serializes the Config into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email: body['email'] = self.email.as_dict() + if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict() + if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams.as_dict() + if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict() + if self.slack: 
body['slack'] = self.slack.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Config: + """Deserializes the Config from a dictionary.""" + return cls(email=_from_dict(d, 'email', EmailConfig), + generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig), + microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig), + pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig), + slack=_from_dict(d, 'slack', SlackConfig)) + + @dataclass class CreateIpAccessList: """Details required to configure a block list or allow list.""" @@ -367,6 +397,27 @@ def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest: return cls(name=d.get('name', None), region=d.get('region', None)) +@dataclass +class CreateNotificationDestinationRequest: + config: Optional[Config] = None + """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + def as_dict(self) -> dict: + """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest: + """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None)) + + @dataclass class CreateOboTokenRequest: """Configuration details for creating on-behalf tokens.""" @@ -705,6 +756,46 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingRes return cls(etag=d.get('etag', None)) +class DestinationType(Enum): + + EMAIL = 'EMAIL' + MICROSOFT_TEAMS = 'MICROSOFT_TEAMS' + PAGERDUTY = 'PAGERDUTY' + SLACK = 
'SLACK' + WEBHOOK = 'WEBHOOK' + + +@dataclass +class EmailConfig: + addresses: Optional[List[str]] = None + """Email addresses to notify.""" + + def as_dict(self) -> dict: + """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.addresses: body['addresses'] = [v for v in self.addresses] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EmailConfig: + """Deserializes the EmailConfig from a dictionary.""" + return cls(addresses=d.get('addresses', None)) + + +@dataclass +class Empty: + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + @dataclass class EnhancedSecurityMonitoring: """SHIELD feature: ESM""" @@ -920,6 +1011,48 @@ def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse: return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) +@dataclass +class GenericWebhookConfig: + password: Optional[str] = None + """[Input-Only][Optional] Password for webhook.""" + + password_set: Optional[bool] = None + """[Output-Only] Whether password is set.""" + + url: Optional[str] = None + """[Input-Only] URL for webhook.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + username: Optional[str] = None + """[Input-Only][Optional] Username for webhook.""" + + username_set: Optional[bool] = None + """[Output-Only] Whether username is set.""" + + def as_dict(self) -> dict: + """Serializes the GenericWebhookConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.password is not None: body['password'] = self.password + if self.password_set is not None: body['password_set'] = self.password_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: 
body['url_set'] = self.url_set + if self.username is not None: body['username'] = self.username + if self.username_set is not None: body['username_set'] = self.username_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig: + """Deserializes the GenericWebhookConfig from a dictionary.""" + return cls(password=d.get('password', None), + password_set=d.get('password_set', None), + url=d.get('url', None), + url_set=d.get('url_set', None), + username=d.get('username', None), + username_set=d.get('username_set', None)) + + @dataclass class GetIpAccessListResponse: ip_access_list: Optional[IpAccessListInfo] = None @@ -1118,6 +1251,54 @@ def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsRe next_page_token=d.get('next_page_token', None)) +@dataclass +class ListNotificationDestinationsResponse: + next_page_token: Optional[str] = None + """Page token for next of results.""" + + results: Optional[List[ListNotificationDestinationsResult]] = None + + def as_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse: + """Deserializes the ListNotificationDestinationsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListNotificationDestinationsResult)) + + +@dataclass +class ListNotificationDestinationsResult: + destination_type: Optional[DestinationType] = None + """[Output-only] The type of the notification destination. 
The type can not be changed once set.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + """UUID identifying notification destination.""" + + def as_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult: + """Deserializes the ListNotificationDestinationsResult from a dictionary.""" + return cls(destination_type=_enum(d, 'destination_type', DestinationType), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + @dataclass class ListPublicTokensResponse: token_infos: Optional[List[PublicTokenInfo]] = None @@ -1164,6 +1345,27 @@ class ListType(Enum): BLOCK = 'BLOCK' +@dataclass +class MicrosoftTeamsConfig: + url: Optional[str] = None + """[Input-Only] URL for Microsoft Teams.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + def as_dict(self) -> dict: + """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig: + """Deserializes the MicrosoftTeamsConfig from a dictionary.""" + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + @dataclass class NccAwsStableIpRule: """The stable AWS IP CIDR blocks. 
You can use these to configure the firewall of your resources to @@ -1450,6 +1652,61 @@ def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration: updated_time=d.get('updated_time', None)) +@dataclass +class NotificationDestination: + config: Optional[Config] = None + """The configuration for the notification destination. Will be exactly one of the nested configs. + Only returns for users with workspace admin permissions.""" + + destination_type: Optional[DestinationType] = None + """[Output-only] The type of the notification destination. The type can not be changed once set.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + """UUID identifying notification destination.""" + + def as_dict(self) -> dict: + """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> NotificationDestination: + """Deserializes the NotificationDestination from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), + destination_type=_enum(d, 'destination_type', DestinationType), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + +@dataclass +class PagerdutyConfig: + integration_key: Optional[str] = None + """[Input-Only] Integration key for PagerDuty.""" + + integration_key_set: Optional[bool] = None + """[Output-Only] Whether integration key is set.""" + + def as_dict(self) -> dict: + """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.integration_key is not None: body['integration_key'] = 
self.integration_key + if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig: + """Deserializes the PagerdutyConfig from a dictionary.""" + return cls(integration_key=d.get('integration_key', None), + integration_key_set=d.get('integration_key_set', None)) + + @dataclass class PartitionId: """Partition by workspace or account""" @@ -1642,7 +1899,6 @@ class RestrictWorkspaceAdminsMessageStatus(Enum): ALLOW_ALL = 'ALLOW_ALL' RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS' - STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED' @dataclass @@ -1726,6 +1982,27 @@ def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse: return cls() +@dataclass +class SlackConfig: + url: Optional[str] = None + """[Input-Only] URL for Slack destination.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + def as_dict(self) -> dict: + """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> SlackConfig: + """Deserializes the SlackConfig from a dictionary.""" + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + @dataclass class StringMessage: value: Optional[str] = None @@ -2189,6 +2466,32 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList: list_type=_enum(d, 'list_type', ListType)) +@dataclass +class UpdateNotificationDestinationRequest: + config: Optional[Config] = None + """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest: + """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + @dataclass class UpdatePersonalComputeSettingRequest: """Details required to update a setting.""" @@ -3402,6 +3705,122 @@ def list_private_endpoint_rules( query['page_token'] = json['next_page_token'] +class NotificationDestinationsAPI: + """The notification destinations API lets you programmatically manage a workspace's notification + destinations. Notification destinations are used to send notifications for query alerts and jobs to + destinations outside of Databricks. Only workspace admins can create, update, and delete notification + destinations.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + config: Optional[Config] = None, + display_name: Optional[str] = None) -> NotificationDestination: + """Create a notification destination. + + Creates a notification destination. Requires workspace admin permissions. + + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. 
+ + :returns: :class:`NotificationDestination` + """ + body = {} + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/notification-destinations', body=body, headers=headers) + return NotificationDestination.from_dict(res) + + def delete(self, id: str): + """Delete a notification destination. + + Deletes a notification destination. Requires workspace admin permissions. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/notification-destinations/{id}', headers=headers) + + def get(self, id: str) -> NotificationDestination: + """Get a notification destination. + + Gets a notification destination. + + :param id: str + + :returns: :class:`NotificationDestination` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/notification-destinations/{id}', headers=headers) + return NotificationDestination.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]: + """List notification destinations. + + Lists notification destinations. 
+ + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListNotificationDestinationsResult` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/notification-destinations', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListNotificationDestinationsResult.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + id: str, + *, + config: Optional[Config] = None, + display_name: Optional[str] = None) -> NotificationDestination: + """Update a notification destination. + + Updates a notification destination. Requires workspace admin permissions. At least one field is + required in the request body. + + :param id: str + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. + + :returns: :class:`NotificationDestination` + """ + body = {} + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/notification-destinations/{id}', body=body, headers=headers) + return NotificationDestination.from_dict(res) + + class PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. 
By default all users in all workspaces have access (ON), but you can diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index d716fad93..fc411ff83 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -788,6 +788,7 @@ class Privilege(Enum): CREATE_VIEW = 'CREATE_VIEW' CREATE_VOLUME = 'CREATE_VOLUME' EXECUTE = 'EXECUTE' + MANAGE = 'MANAGE' MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST' MODIFY = 'MODIFY' READ_FILES = 'READ_FILES' diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index b363ab7d2..bcb46bb50 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -46,69 +46,206 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControl: @dataclass class Alert: - created_at: Optional[str] = None - """Timestamp when the alert was created.""" + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" - id: Optional[str] = None - """Alert ID.""" + create_time: Optional[str] = None + """The timestamp indicating when the alert was created.""" - last_triggered_at: Optional[str] = None - """Timestamp when the alert was last triggered.""" + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - name: Optional[str] = None - """Name of the alert.""" + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. 
+ + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - options: Optional[AlertOptions] = None - """Alert configuration options.""" + display_name: Optional[str] = None + """The display name of the alert.""" - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" + id: Optional[str] = None + """UUID identifying the alert.""" - query: Optional[AlertQuery] = None + lifecycle_state: Optional[LifecycleState] = None + """The workspace state of the alert. Used for tracking trashed status.""" - rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. If `null`, alert will never be triggered again.""" + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + parent_path: Optional[str] = None + """The workspace path of the folder containing the alert.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" state: Optional[AlertState] = None - """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + """Current state of the alert's trigger status. 
This field is set to UNKNOWN if the alert has not + yet been evaluated or ran into an error during the last evaluation.""" - updated_at: Optional[str] = None - """Timestamp when the alert was last updated.""" + trigger_time: Optional[str] = None + """Timestamp when the alert was last triggered, if the alert has been triggered before.""" - user: Optional[User] = None + update_time: Optional[str] = None + """The timestamp indicating when the alert was updated.""" def as_dict(self) -> dict: """Serializes the Alert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.query: body['query'] = self.query.as_dict() - if self.rearm is not None: body['rearm'] = self.rearm + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger if self.state is not None: body['state'] = self.state.value - if self.updated_at is not None: body['updated_at'] = 
self.updated_at - if self.user: body['user'] = self.user.as_dict() + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Alert: """Deserializes the Alert from a dictionary.""" - return cls(created_at=d.get('created_at', None), + return cls(condition=_from_dict(d, 'condition', AlertCondition), + create_time=d.get('create_time', None), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), id=d.get('id', None), - last_triggered_at=d.get('last_triggered_at', None), - name=d.get('name', None), - options=_from_dict(d, 'options', AlertOptions), - parent=d.get('parent', None), - query=_from_dict(d, 'query', AlertQuery), - rearm=d.get('rearm', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parent_path=d.get('parent_path', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), - updated_at=d.get('updated_at', None), - user=_from_dict(d, 'user', User)) + trigger_time=d.get('trigger_time', None), + update_time=d.get('update_time', None)) + + +@dataclass +class AlertCondition: + empty_result_state: Optional[AlertState] = None + """Alert state if result is empty.""" + + op: Optional[AlertOperator] = None + """Operator used for comparison in alert evaluation.""" + + operand: Optional[AlertConditionOperand] = None + """Name of the column from the query result to use for comparison in alert evaluation.""" + + threshold: Optional[AlertConditionThreshold] = None + """Threshold value used for comparison in alert evaluation.""" + + def as_dict(self) -> dict: + """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body.""" + body = {} + if 
self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value + if self.op is not None: body['op'] = self.op.value + if self.operand: body['operand'] = self.operand.as_dict() + if self.threshold: body['threshold'] = self.threshold.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertCondition: + """Deserializes the AlertCondition from a dictionary.""" + return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState), + op=_enum(d, 'op', AlertOperator), + operand=_from_dict(d, 'operand', AlertConditionOperand), + threshold=_from_dict(d, 'threshold', AlertConditionThreshold)) + + +@dataclass +class AlertConditionOperand: + column: Optional[AlertOperandColumn] = None + + def as_dict(self) -> dict: + """Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.column: body['column'] = self.column.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand: + """Deserializes the AlertConditionOperand from a dictionary.""" + return cls(column=_from_dict(d, 'column', AlertOperandColumn)) + + +@dataclass +class AlertConditionThreshold: + value: Optional[AlertOperandValue] = None + + def as_dict(self) -> dict: + """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value: body['value'] = self.value.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold: + """Deserializes the AlertConditionThreshold from a dictionary.""" + return cls(value=_from_dict(d, 'value', AlertOperandValue)) + + +@dataclass +class AlertOperandColumn: + name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + + 
@classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn: + """Deserializes the AlertOperandColumn from a dictionary.""" + return cls(name=d.get('name', None)) + + +@dataclass +class AlertOperandValue: + bool_value: Optional[bool] = None + + double_value: Optional[float] = None + + string_value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue: + """Deserializes the AlertOperandValue from a dictionary.""" + return cls(bool_value=d.get('bool_value', None), + double_value=d.get('double_value', None), + string_value=d.get('string_value', None)) + + +class AlertOperator(Enum): + + EQUAL = 'EQUAL' + GREATER_THAN = 'GREATER_THAN' + GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' + IS_NULL = 'IS_NULL' + LESS_THAN = 'LESS_THAN' + LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' + NOT_EQUAL = 'NOT_EQUAL' @dataclass @@ -259,12 +396,10 @@ def from_dict(cls, d: Dict[str, any]) -> AlertQuery: class AlertState(Enum): - """State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" - OK = 'ok' - TRIGGERED = 'triggered' - UNKNOWN = 'unknown' + OK = 'OK' + TRIGGERED = 'TRIGGERED' + UNKNOWN = 'UNKNOWN' @dataclass @@ -338,10 +473,10 @@ def from_dict(cls, d: Dict[str, any]) -> Channel: @dataclass class ChannelInfo: - """Channel information for the SQL warehouse at the time of query execution""" + """Details about a Channel.""" dbsql_version: Optional[str] = None - """DBSQL Version the channel is mapped to""" + """DB SQL Version the Channel is mapped to.""" name: Optional[ChannelName] = None """Name of the channel""" @@ -360,7 +495,6 @@ def from_dict(cls, d: Dict[str, any]) -> ChannelInfo: class ChannelName(Enum): - """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' @@ -369,6 +503,29 @@ class ChannelName(Enum): CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED' +@dataclass +class ClientCallContext: + """Client code that triggered the request""" + + file_name: Optional[EncodedText] = None + """File name that contains the last line that triggered the request.""" + + line_number: Optional[int] = None + """Last line number within a file or notebook cell that triggered the request.""" + + def as_dict(self) -> dict: + """Serializes the ClientCallContext into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.file_name: body['file_name'] = self.file_name.as_dict() + if self.line_number is not None: body['line_number'] = self.line_number + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClientCallContext: + """Deserializes the ClientCallContext from a dictionary.""" + return cls(file_name=_from_dict(d, 'file_name', EncodedText), line_number=d.get('line_number', None)) + + @dataclass class ColumnInfo: name: Optional[str] = None @@ -443,6 +600,68 @@ class 
ColumnInfoTypeName(Enum): USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' +@dataclass +class ContextFilter: + dbsql_alert_id: Optional[str] = None + """Databricks SQL Alert id""" + + dbsql_dashboard_id: Optional[str] = None + """Databricks SQL Dashboard id""" + + dbsql_query_id: Optional[str] = None + """Databricks SQL Query id""" + + dbsql_session_id: Optional[str] = None + """Databricks SQL Query session id""" + + job_id: Optional[str] = None + """Databricks Workflows id""" + + job_run_id: Optional[str] = None + """Databricks Workflows task run id""" + + lakeview_dashboard_id: Optional[str] = None + """Databricks Lakeview Dashboard id""" + + notebook_cell_run_id: Optional[str] = None + """Databricks Notebook runnableCommandId""" + + notebook_id: Optional[str] = None + """Databricks Notebook id""" + + statement_ids: Optional[List[str]] = None + """Databricks Query History statement ids.""" + + def as_dict(self) -> dict: + """Serializes the ContextFilter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id + if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id + if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id + if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.lakeview_dashboard_id is not None: body['lakeview_dashboard_id'] = self.lakeview_dashboard_id + if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ContextFilter: + """Deserializes the ContextFilter 
from a dictionary.""" + return cls(dbsql_alert_id=d.get('dbsql_alert_id', None), + dbsql_dashboard_id=d.get('dbsql_dashboard_id', None), + dbsql_query_id=d.get('dbsql_query_id', None), + dbsql_session_id=d.get('dbsql_session_id', None), + job_id=d.get('job_id', None), + job_run_id=d.get('job_run_id', None), + lakeview_dashboard_id=d.get('lakeview_dashboard_id', None), + notebook_cell_run_id=d.get('notebook_cell_run_id', None), + notebook_id=d.get('notebook_id', None), + statement_ids=d.get('statement_ids', None)) + + @dataclass class CreateAlert: name: str @@ -482,98 +701,304 @@ def from_dict(cls, d: Dict[str, any]) -> CreateAlert: @dataclass -class CreateWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - - Defaults to 120 mins""" +class CreateAlertRequest: + alert: Optional[CreateAlertRequestAlert] = None - channel: Optional[Channel] = None - """Channel Details""" + def as_dict(self) -> dict: + """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert: body['alert'] = self.alert.as_dict() + return body - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. If you want to increase the number of concurrent queries, - please tune max_num_clusters. 
- - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest: + """Deserializes the CreateAlertRequest from a dictionary.""" + return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert)) - creator_name: Optional[str] = None - """warehouse creator name""" - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. +@dataclass +class CreateAlertRequestAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. - Defaults to false.""" + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute""" + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - instance_profile_arn: Optional[str] = None - """Deprecated. Instance profile used to pass IAM role to the cluster""" + display_name: Optional[str] = None + """The display name of the alert.""" - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" + parent_path: Optional[str] = None + """The workspace path of the folder containing the alert.""" - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. 
Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. - Must be less than 100 characters.""" + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" + def as_dict(self) -> dict: + """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. 
- - Supported values: - Number of tags < 45.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert: + """Deserializes the CreateAlertRequestAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), + parent_path=d.get('parent_path', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None)) - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" + +@dataclass +class CreateQueryRequest: + query: Optional[CreateQueryRequestQuery] = None def as_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel.as_dict() - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: - body['enable_serverless_compute'] = self.enable_serverless_compute - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: - 
body['spot_instance_policy'] = self.spot_instance_policy.value - if self.tags: body['tags'] = self.tags.as_dict() - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.query: body['query'] = self.query.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest: - """Deserializes the CreateWarehouseRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest: + """Deserializes the CreateQueryRequest from a dictionary.""" + return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery)) + + +@dataclass +class CreateQueryRequestQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + parent_path: Optional[str] = None + """Workspace path of the workspace folder containing the object.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is 
not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery: + """Deserializes the CreateQueryRequestQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + parent_path=d.get('parent_path', None), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + warehouse_id=d.get('warehouse_id', None)) + + +@dataclass +class CreateVisualizationRequest: + visualization: Optional[CreateVisualizationRequestVisualization] = None + + def as_dict(self) -> dict: + """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.visualization: body['visualization'] = self.visualization.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest: + """Deserializes the CreateVisualizationRequest from a dictionary.""" + return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization)) + + +@dataclass +class 
CreateVisualizationRequestVisualization: + display_name: Optional[str] = None + """The display name of the visualization.""" + + query_id: Optional[str] = None + """UUID of the query that the visualization is attached to.""" + + serialized_options: Optional[str] = None + """The visualization options varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying visualization options directly.""" + + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying the visualization query plan directly.""" + + type: Optional[str] = None + """The type of visualization: counter, table, funnel, and so on.""" + + def as_dict(self) -> dict: + """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization: + """Deserializes the CreateVisualizationRequestVisualization from a dictionary.""" + return cls(display_name=d.get('display_name', None), + query_id=d.get('query_id', None), + serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), + type=d.get('type', None)) + + +@dataclass +class CreateWarehouseRequest: + auto_stop_mins: Optional[int] = None + """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) + before it is automatically 
stopped. + + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. + + Defaults to 120 mins""" + + channel: Optional[Channel] = None + """Channel Details""" + + cluster_size: Optional[str] = None + """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows + you to run larger queries on it. If you want to increase the number of concurrent queries, + please tune max_num_clusters. + + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large + - 4X-Large""" + + creator_name: Optional[str] = None + """warehouse creator name""" + + enable_photon: Optional[bool] = None + """Configures whether the warehouse should use Photon optimized clusters. + + Defaults to false.""" + + enable_serverless_compute: Optional[bool] = None + """Configures whether the warehouse should use serverless compute""" + + instance_profile_arn: Optional[str] = None + """Deprecated. Instance profile used to pass IAM role to the cluster""" + + max_num_clusters: Optional[int] = None + """Maximum number of clusters that the autoscaler will create to handle concurrent queries. + + Supported values: - Must be >= min_num_clusters - Must be <= 30. + + Defaults to min_clusters if unset.""" + + min_num_clusters: Optional[int] = None + """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing + this will ensure that a larger number of clusters are always running and therefore may reduce + the cold start time for new queries. This is similar to reserved vs. revocable cores in a + resource manager. + + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) + + Defaults to 1""" + + name: Optional[str] = None + """Logical name for the cluster. + + Supported values: - Must be unique within an org. 
- Must be less than 100 characters.""" + + spot_instance_policy: Optional[SpotInstancePolicy] = None + """Configurations whether the warehouse should use spot instances.""" + + tags: Optional[EndpointTags] = None + """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS + volumes) associated with this SQL warehouse. + + Supported values: - Number of tags < 45.""" + + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None + """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`.""" + + def as_dict(self) -> dict: + """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + body['enable_serverless_compute'] = self.enable_serverless_compute + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: + body['spot_instance_policy'] = self.spot_instance_policy.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest: + """Deserializes the 
CreateWarehouseRequest from a dictionary.""" return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), @@ -914,6 +1339,121 @@ def from_dict(cls, d: Dict[str, any]) -> DataSource: warehouse_id=d.get('warehouse_id', None)) +class DatePrecision(Enum): + + DAY_PRECISION = 'DAY_PRECISION' + MINUTE_PRECISION = 'MINUTE_PRECISION' + SECOND_PRECISION = 'SECOND_PRECISION' + + +@dataclass +class DateRange: + start: str + + end: str + + def as_dict(self) -> dict: + """Serializes the DateRange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.end is not None: body['end'] = self.end + if self.start is not None: body['start'] = self.start + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateRange: + """Deserializes the DateRange from a dictionary.""" + return cls(end=d.get('end', None), start=d.get('start', None)) + + +@dataclass +class DateRangeValue: + date_range_value: Optional[DateRange] = None + """Manually specified date-time range value.""" + + dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None + """Dynamic date-time range value based on current date-time.""" + + precision: Optional[DatePrecision] = None + """Date-time precision to format the value into when the query is run. 
Defaults to DAY_PRECISION + (YYYY-MM-DD).""" + + start_day_of_week: Optional[int] = None + + def as_dict(self) -> dict: + """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.dynamic_date_range_value is not None: + body['dynamic_date_range_value'] = self.dynamic_date_range_value.value + if self.precision is not None: body['precision'] = self.precision.value + if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateRangeValue: + """Deserializes the DateRangeValue from a dictionary.""" + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange), + dynamic_date_range_value=_enum(d, 'dynamic_date_range_value', + DateRangeValueDynamicDateRange), + precision=_enum(d, 'precision', DatePrecision), + start_day_of_week=d.get('start_day_of_week', None)) + + +class DateRangeValueDynamicDateRange(Enum): + + LAST_12_MONTHS = 'LAST_12_MONTHS' + LAST_14_DAYS = 'LAST_14_DAYS' + LAST_24_HOURS = 'LAST_24_HOURS' + LAST_30_DAYS = 'LAST_30_DAYS' + LAST_60_DAYS = 'LAST_60_DAYS' + LAST_7_DAYS = 'LAST_7_DAYS' + LAST_8_HOURS = 'LAST_8_HOURS' + LAST_90_DAYS = 'LAST_90_DAYS' + LAST_HOUR = 'LAST_HOUR' + LAST_MONTH = 'LAST_MONTH' + LAST_WEEK = 'LAST_WEEK' + LAST_YEAR = 'LAST_YEAR' + THIS_MONTH = 'THIS_MONTH' + THIS_WEEK = 'THIS_WEEK' + THIS_YEAR = 'THIS_YEAR' + TODAY = 'TODAY' + YESTERDAY = 'YESTERDAY' + + +@dataclass +class DateValue: + date_value: Optional[str] = None + """Manually specified date-time value.""" + + dynamic_date_value: Optional[DateValueDynamicDate] = None + """Dynamic date-time value based on current date-time.""" + + precision: Optional[DatePrecision] = None + """Date-time precision to format the value into when the query is run. 
Defaults to DAY_PRECISION + (YYYY-MM-DD).""" + + def as_dict(self) -> dict: + """Serializes the DateValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_value is not None: body['date_value'] = self.date_value + if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value + if self.precision is not None: body['precision'] = self.precision.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateValue: + """Deserializes the DateValue from a dictionary.""" + return cls(date_value=d.get('date_value', None), + dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate), + precision=_enum(d, 'precision', DatePrecision)) + + +class DateValueDynamicDate(Enum): + + NOW = 'NOW' + YESTERDAY = 'YESTERDAY' + + @dataclass class DeleteResponse: @@ -1141,6 +1681,50 @@ def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse: return cls() +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class EncodedText: + encoding: Optional[EncodedTextEncoding] = None + """Carry text data in different form.""" + + text: Optional[str] = None + """text data""" + + def as_dict(self) -> dict: + """Serializes the EncodedText into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.encoding is not None: body['encoding'] = self.encoding.value + if self.text is not None: body['text'] = self.text + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EncodedText: + """Deserializes the EncodedText from a dictionary.""" + return cls(encoding=_enum(d, 
'encoding', EncodedTextEncoding), text=d.get('text', None)) + + +class EncodedTextEncoding(Enum): + """Carry text data in different form.""" + + BASE64 = 'BASE64' + PLAIN = 'PLAIN' + + @dataclass class EndpointConfPair: key: Optional[str] = None @@ -1385,6 +1969,33 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTags: return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair)) +@dataclass +class EnumValue: + enum_options: Optional[str] = None + """List of valid query parameter values, newline delimited.""" + + multi_values_options: Optional[MultiValuesOptions] = None + """If specified, allows multiple values to be selected for this parameter.""" + + values: Optional[List[str]] = None + """List of selected query parameter values.""" + + def as_dict(self) -> dict: + """Serializes the EnumValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.enum_options is not None: body['enum_options'] = self.enum_options + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.values: body['values'] = [v for v in self.values] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnumValue: + """Deserializes the EnumValue from a dictionary.""" + return cls(enum_options=d.get('enum_options', None), + multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), + values=d.get('values', None)) + + @dataclass class ExecuteStatementRequest: statement: str @@ -1567,47 +2178,10 @@ class ExecuteStatementRequestOnWaitTimeout(Enum): @dataclass -class ExecuteStatementResponse: - manifest: Optional[ResultManifest] = None - """The result manifest provides schema and metadata for the result set.""" - - result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. 
When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. Currently - only a single link is returned.)""" - - statement_id: Optional[str] = None - """The statement ID is returned upon successfully submitting a SQL statement, and is a required - reference for all subsequent calls.""" - - status: Optional[StatementStatus] = None - """The status response includes execution state and if relevant, error information.""" - - def as_dict(self) -> dict: - """Serializes the ExecuteStatementResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.result: body['result'] = self.result.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse: - """Deserializes the ExecuteStatementResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), - result=_from_dict(d, 'result', ResultData), - statement_id=d.get('statement_id', None), - status=_from_dict(d, 'status', StatementStatus)) - - -@dataclass -class ExternalLink: - byte_count: Optional[int] = None - """The number of bytes in the result chunk. This field is not available when using `INLINE` - disposition.""" +class ExternalLink: + byte_count: Optional[int] = None + """The number of bytes in the result chunk. 
This field is not available when using `INLINE` + disposition.""" chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" @@ -1706,43 +2280,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetResponse: object_type=_enum(d, 'object_type', ObjectType)) -@dataclass -class GetStatementResponse: - manifest: Optional[ResultManifest] = None - """The result manifest provides schema and metadata for the result set.""" - - result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. Currently - only a single link is returned.)""" - - statement_id: Optional[str] = None - """The statement ID is returned upon successfully submitting a SQL statement, and is a required - reference for all subsequent calls.""" - - status: Optional[StatementStatus] = None - """The status response includes execution state and if relevant, error information.""" - - def as_dict(self) -> dict: - """Serializes the GetStatementResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.result: body['result'] = self.result.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> GetStatementResponse: - """Deserializes the GetStatementResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), - result=_from_dict(d, 'result', ResultData), - statement_id=d.get('statement_id', None), - status=_from_dict(d, 'status', StatementStatus)) - - 
@dataclass class GetWarehousePermissionLevelsResponse: permission_levels: Optional[List[WarehousePermissionsDescription]] = None @@ -1988,6 +2525,386 @@ class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum): PASSTHROUGH = 'PASSTHROUGH' +@dataclass +class LegacyAlert: + created_at: Optional[str] = None + """Timestamp when the alert was created.""" + + id: Optional[str] = None + """Alert ID.""" + + last_triggered_at: Optional[str] = None + """Timestamp when the alert was last triggered.""" + + name: Optional[str] = None + """Name of the alert.""" + + options: Optional[AlertOptions] = None + """Alert configuration options.""" + + parent: Optional[str] = None + """The identifier of the workspace folder containing the object.""" + + query: Optional[AlertQuery] = None + + rearm: Optional[int] = None + """Number of seconds after being triggered before the alert rearms itself and can be triggered + again. If `null`, alert will never be triggered again.""" + + state: Optional[LegacyAlertState] = None + """State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated + and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + + updated_at: Optional[str] = None + """Timestamp when the alert was last updated.""" + + user: Optional[User] = None + + def as_dict(self) -> dict: + """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.query: body['query'] = self.query.as_dict() + if self.rearm is not None: body['rearm'] = self.rearm + if self.state is not None: body['state'] = self.state.value + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyAlert: + """Deserializes the LegacyAlert from a dictionary.""" + return cls(created_at=d.get('created_at', None), + id=d.get('id', None), + last_triggered_at=d.get('last_triggered_at', None), + name=d.get('name', None), + options=_from_dict(d, 'options', AlertOptions), + parent=d.get('parent', None), + query=_from_dict(d, 'query', AlertQuery), + rearm=d.get('rearm', None), + state=_enum(d, 'state', LegacyAlertState), + updated_at=d.get('updated_at', None), + user=_from_dict(d, 'user', User)) + + +class LegacyAlertState(Enum): + """State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated + and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + + OK = 'ok' + TRIGGERED = 'triggered' + UNKNOWN = 'unknown' + + +@dataclass +class LegacyQuery: + can_edit: Optional[bool] = None + """Describes whether the authenticated user is allowed to edit the definition of this query.""" + + created_at: Optional[str] = None + """The timestamp when this query was created.""" + + data_source_id: Optional[str] = None + """Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + id: Optional[str] = None + """Query ID.""" + + is_archived: Optional[bool] = None + """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear + in search results. If this boolean is `true`, the `options` property for this query includes a + `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" + + is_draft: Optional[bool] = None + """Whether the query is a draft. Draft queries only appear in list views for their owners. + Visualizations from draft queries cannot appear on dashboards.""" + + is_favorite: Optional[bool] = None + """Whether this query object appears in the current user's favorites list. This flag determines + whether the star icon for favorites is selected.""" + + is_safe: Optional[bool] = None + """Text parameter types are not safe from SQL injection for all types of data source. 
Set this + Boolean parameter to `true` if a query either does not use any text type parameters or uses a + data source type where text type parameters are handled safely.""" + + last_modified_by: Optional[User] = None + + last_modified_by_id: Optional[int] = None + """The ID of the user who last saved changes to this query.""" + + latest_query_data_id: Optional[str] = None + """If there is a cached result for this query and user, this field includes the query result ID. If + this query uses parameters, this field is always null.""" + + name: Optional[str] = None + """The title of this query that appears in list views, widget headings, and on the query page.""" + + options: Optional[QueryOptions] = None + + parent: Optional[str] = None + """The identifier of the workspace folder containing the object.""" + + permission_tier: Optional[PermissionLevel] = None + """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query + * `CAN_MANAGE`: Can manage the query""" + + query: Optional[str] = None + """The text of the query to be run.""" + + query_hash: Optional[str] = None + """A SHA-256 hash of the query text along with the authenticated user ID.""" + + run_as_role: Optional[RunAsRole] = None + """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + + tags: Optional[List[str]] = None + + updated_at: Optional[str] = None + """The timestamp at which this query was last updated.""" + + user: Optional[User] = None + + user_id: Optional[int] = None + """The ID of the user who owns the query.""" + + visualizations: Optional[List[LegacyVisualization]] = None + + def as_dict(self) -> dict: + """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() + if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id + if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.query is not None: body['query'] = self.query + if self.query_hash is not None: body['query_hash'] = self.query_hash + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] 
+ if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + if self.user_id is not None: body['user_id'] = self.user_id + if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyQuery: + """Deserializes the LegacyQuery from a dictionary.""" + return cls(can_edit=d.get('can_edit', None), + created_at=d.get('created_at', None), + data_source_id=d.get('data_source_id', None), + description=d.get('description', None), + id=d.get('id', None), + is_archived=d.get('is_archived', None), + is_draft=d.get('is_draft', None), + is_favorite=d.get('is_favorite', None), + is_safe=d.get('is_safe', None), + last_modified_by=_from_dict(d, 'last_modified_by', User), + last_modified_by_id=d.get('last_modified_by_id', None), + latest_query_data_id=d.get('latest_query_data_id', None), + name=d.get('name', None), + options=_from_dict(d, 'options', QueryOptions), + parent=d.get('parent', None), + permission_tier=_enum(d, 'permission_tier', PermissionLevel), + query=d.get('query', None), + query_hash=d.get('query_hash', None), + run_as_role=_enum(d, 'run_as_role', RunAsRole), + tags=d.get('tags', None), + updated_at=d.get('updated_at', None), + user=_from_dict(d, 'user', User), + user_id=d.get('user_id', None), + visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization)) + + +@dataclass +class LegacyVisualization: + """The visualization description API changes frequently and is unsupported. You can duplicate a + visualization by copying description objects received _from the API_ and then using them to + create a new one with a POST request to the same endpoint. Databricks does not recommend + constructing ad-hoc visualizations entirely in JSON.""" + + created_at: Optional[str] = None + + description: Optional[str] = None + """A short description of this visualization. 
This is not displayed in the UI.""" + + id: Optional[str] = None + """The UUID for this visualization.""" + + name: Optional[str] = None + """The name of the visualization that appears on dashboards and the query screen.""" + + options: Optional[Any] = None + """The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON.""" + + query: Optional[LegacyQuery] = None + + type: Optional[str] = None + """The type of visualization: chart, table, pivot table, and so on.""" + + updated_at: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query: body['query'] = self.query.as_dict() + if self.type is not None: body['type'] = self.type + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization: + """Deserializes the LegacyVisualization from a dictionary.""" + return cls(created_at=d.get('created_at', None), + description=d.get('description', None), + id=d.get('id', None), + name=d.get('name', None), + options=d.get('options', None), + query=_from_dict(d, 'query', LegacyQuery), + type=d.get('type', None), + updated_at=d.get('updated_at', None)) + + +class LifecycleState(Enum): + + ACTIVE = 'ACTIVE' + TRASHED = 'TRASHED' + + +@dataclass +class ListAlertsResponse: + next_page_token: Optional[str] = None + + results: Optional[List[ListAlertsResponseAlert]] = None + + def as_dict(self) -> dict: + """Serializes the ListAlertsResponse into a dictionary suitable 
for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse: + """Deserializes the ListAlertsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListAlertsResponseAlert)) + + +@dataclass +class ListAlertsResponseAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + create_time: Optional[str] = None + """The timestamp indicating when the alert was created.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + display_name: Optional[str] = None + """The display name of the alert.""" + + id: Optional[str] = None + """UUID identifying the alert.""" + + lifecycle_state: Optional[LifecycleState] = None + """The workspace state of the alert. Used for tracking trashed status.""" + + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. 
If 0 or not specified, the alert will not be triggered again.""" + + state: Optional[AlertState] = None + """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not + yet been evaluated or ran into an error during the last evaluation.""" + + trigger_time: Optional[str] = None + """Timestamp when the alert was last triggered, if the alert has been triggered before.""" + + update_time: Optional[str] = None + """The timestamp indicating when the alert was updated.""" + + def as_dict(self) -> dict: + """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state.value + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert: + """Deserializes the ListAlertsResponseAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + create_time=d.get('create_time', None), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', 
None), + display_name=d.get('display_name', None), + id=d.get('id', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None), + state=_enum(d, 'state', AlertState), + trigger_time=d.get('trigger_time', None), + update_time=d.get('update_time', None)) + + class ListOrder(Enum): CREATED_AT = 'created_at' @@ -2020,6 +2937,118 @@ def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse: res=_repeated_dict(d, 'res', QueryInfo)) +@dataclass +class ListQueryObjectsResponse: + next_page_token: Optional[str] = None + + results: Optional[List[ListQueryObjectsResponseQuery]] = None + + def as_dict(self) -> dict: + """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse: + """Deserializes the ListQueryObjectsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery)) + + +@dataclass +class ListQueryObjectsResponseQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" + + create_time: Optional[str] = None + """Timestamp when this query was created.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + id: Optional[str] = 
None + """UUID identifying the query.""" + + last_modifier_user_name: Optional[str] = None + """Username of the user who last saved changes to this query.""" + + lifecycle_state: Optional[LifecycleState] = None + """Indicates whether the query is trashed.""" + + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + update_time: Optional[str] = None + """Timestamp when this query was last updated.""" + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: 
body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery: + """Deserializes the ListQueryObjectsResponseQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + create_time=d.get('create_time', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + id=d.get('id', None), + last_modifier_user_name=d.get('last_modifier_user_name', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + update_time=d.get('update_time', None), + warehouse_id=d.get('warehouse_id', None)) + + @dataclass class ListResponse: count: Optional[int] = None @@ -2052,6 +3081,26 @@ def from_dict(cls, d: Dict[str, any]) -> ListResponse: results=_repeated_dict(d, 'results', Dashboard)) +@dataclass +class ListVisualizationsForQueryResponse: + next_page_token: Optional[str] = None + + results: Optional[List[Visualization]] = None + + def as_dict(self) -> dict: + """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse: + """Deserializes the 
ListVisualizationsForQueryResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', Visualization)) + + @dataclass class ListWarehousesResponse: warehouses: Optional[List[EndpointInfo]] = None @@ -2071,9 +3120,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse: @dataclass class MultiValuesOptions: - """If specified, allows multiple values to be selected for this parameter. Only applies to dropdown - list and query-based dropdown list parameters.""" - prefix: Optional[str] = None """Character that prefixes each selected parameter value.""" @@ -2099,6 +3145,22 @@ def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions: suffix=d.get('suffix', None)) +@dataclass +class NumericValue: + value: Optional[float] = None + + def as_dict(self) -> dict: + """Serializes the NumericValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> NumericValue: + """Deserializes the NumericValue from a dictionary.""" + return cls(value=d.get('value', None)) + + class ObjectType(Enum): """A singular noun object type.""" @@ -2223,7 +3285,7 @@ class PermissionLevel(Enum): class PlansState(Enum): - """Whether plans exist for the execution, or the reason why they are missing""" + """Possible Reasons for which we have not saved plans in the database""" EMPTY = 'EMPTY' EXISTS = 'EXISTS' @@ -2233,143 +3295,128 @@ class PlansState(Enum): UNKNOWN = 'UNKNOWN' -@dataclass -class Query: - can_edit: Optional[bool] = None - """Describes whether the authenticated user is allowed to edit the definition of this query.""" - - created_at: Optional[str] = None - """The timestamp when this query was created.""" - - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. 
[Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - id: Optional[str] = None - """Query ID.""" - - is_archived: Optional[bool] = None - """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear - in search results. If this boolean is `true`, the `options` property for this query includes a - `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" +@dataclass +class Query: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" - is_draft: Optional[bool] = None - """Whether the query is a draft. Draft queries only appear in list views for their owners. - Visualizations from draft queries cannot appear on dashboards.""" + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" - is_favorite: Optional[bool] = None - """Whether this query object appears in the current user's favorites list. This flag determines - whether the star icon for favorites is selected.""" + create_time: Optional[str] = None + """Timestamp when this query was created.""" - is_safe: Optional[bool] = None - """Text parameter types are not safe from SQL injection for all types of data source. 
Set this - Boolean parameter to `true` if a query either does not use any text type parameters or uses a - data source type where text type parameters are handled safely.""" + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" - last_modified_by: Optional[User] = None + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" - last_modified_by_id: Optional[int] = None - """The ID of the user who last saved changes to this query.""" + id: Optional[str] = None + """UUID identifying the query.""" - latest_query_data_id: Optional[str] = None - """If there is a cached result for this query and user, this field includes the query result ID. If - this query uses parameters, this field is always null.""" + last_modifier_user_name: Optional[str] = None + """Username of the user who last saved changes to this query.""" - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" + lifecycle_state: Optional[LifecycleState] = None + """Indicates whether the query is trashed.""" - options: Optional[QueryOptions] = None + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" - permission_tier: Optional[PermissionLevel] = None - """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query - * `CAN_MANAGE`: Can manage the query""" + parent_path: Optional[str] = None + """Workspace path of the workspace folder containing the object.""" - query: Optional[str] = None - """The text of the query to be run.""" + query_text: Optional[str] = None + """Text of the query to be run.""" - 
query_hash: Optional[str] = None - """A SHA-256 hash of the query text along with the authenticated user ID.""" + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" tags: Optional[List[str]] = None - updated_at: Optional[str] = None - """The timestamp at which this query was last updated.""" - - user: Optional[User] = None - - user_id: Optional[int] = None - """The ID of the user who owns the query.""" + update_time: Optional[str] = None + """Timestamp when this query was last updated.""" - visualizations: Optional[List[Visualization]] = None + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" def as_dict(self) -> dict: """Serializes the Query into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_edit is not None: body['can_edit'] = self.can_edit - if self.created_at is not None: body['created_at'] = self.created_at - if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.is_safe is not None: 
body['is_safe'] = self.is_safe - if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() - if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id - if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value - if self.query is not None: body['query'] = self.query - if self.query_hash is not None: body['query_hash'] = self.query_hash - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema if self.tags: body['tags'] = [v for v in self.tags] - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user.as_dict() - if self.user_id is not None: body['user_id'] = self.user_id - if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Query: 
"""Deserializes the Query from a dictionary.""" - return cls(can_edit=d.get('can_edit', None), - created_at=d.get('created_at', None), - data_source_id=d.get('data_source_id', None), + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + create_time=d.get('create_time', None), description=d.get('description', None), + display_name=d.get('display_name', None), id=d.get('id', None), - is_archived=d.get('is_archived', None), - is_draft=d.get('is_draft', None), - is_favorite=d.get('is_favorite', None), - is_safe=d.get('is_safe', None), - last_modified_by=_from_dict(d, 'last_modified_by', User), - last_modified_by_id=d.get('last_modified_by_id', None), - latest_query_data_id=d.get('latest_query_data_id', None), - name=d.get('name', None), - options=_from_dict(d, 'options', QueryOptions), - parent=d.get('parent', None), - permission_tier=_enum(d, 'permission_tier', PermissionLevel), - query=d.get('query', None), - query_hash=d.get('query_hash', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole), + last_modifier_user_name=d.get('last_modifier_user_name', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + parent_path=d.get('parent_path', None), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), tags=d.get('tags', None), - updated_at=d.get('updated_at', None), - user=_from_dict(d, 'user', User), - user_id=d.get('user_id', None), - visualizations=_repeated_dict(d, 'visualizations', Visualization)) + update_time=d.get('update_time', None), + warehouse_id=d.get('warehouse_id', None)) + + +@dataclass +class QueryBackedValue: + multi_values_options: Optional[MultiValuesOptions] = None + """If specified, allows multiple values to be selected for this parameter.""" + + query_id: Optional[str] = None + """UUID of the query that 
provides the parameter values.""" + + values: Optional[List[str]] = None + """List of selected query parameter values.""" + + def as_dict(self) -> dict: + """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.values: body['values'] = [v for v in self.values] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue: + """Deserializes the QueryBackedValue from a dictionary.""" + return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), + query_id=d.get('query_id', None), + values=d.get('values', None)) @dataclass @@ -2430,12 +3477,11 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEditContent: @dataclass class QueryFilter: - """A filter to limit query history results. This field is optional.""" + context_filter: Optional[ContextFilter] = None + """Filter by one or more property describing where the query was generated""" query_start_time_range: Optional[TimeRange] = None - - statement_ids: Optional[List[str]] = None - """A list of statement IDs.""" + """A range filter for query submitted time. 
The time range must be <= 30 days.""" statuses: Optional[List[QueryStatus]] = None @@ -2448,8 +3494,8 @@ class QueryFilter: def as_dict(self) -> dict: """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body.""" body = {} + if self.context_filter: body['context_filter'] = self.context_filter.as_dict() if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict() - if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] if self.statuses: body['statuses'] = [v.value for v in self.statuses] if self.user_ids: body['user_ids'] = [v for v in self.user_ids] if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] @@ -2458,8 +3504,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" - return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), - statement_ids=d.get('statement_ids', None), + return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter), + query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None)) @@ -2467,11 +3513,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryFilter: @dataclass class QueryInfo: - can_subscribe_to_live_query: Optional[bool] = None - """Reserved for internal use.""" - channel_used: Optional[ChannelInfo] = None - """Channel information for the SQL warehouse at the time of query execution""" + """SQL Warehouse channel information at the time of query execution""" duration: Optional[int] = None """Total execution time of the statement ( excluding result fetch time ).""" @@ -2509,6 +3552,8 @@ class QueryInfo: query_id: Optional[str] = None """The query ID.""" + query_source: Optional[QuerySource] = None + query_start_time_ms: Optional[int] = None 
"""The time the query started.""" @@ -2519,15 +3564,17 @@ class QueryInfo: """The number of results returned by the query.""" spark_ui_url: Optional[str] = None - """URL to the query plan.""" + """URL to the Spark UI query plan.""" statement_type: Optional[QueryStatementType] = None """Type of statement for this query""" status: Optional[QueryStatus] = None - """Query status with one the following values: * `QUEUED`: Query has been received and queued. * - `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: - Query has failed. * `FINISHED`: Query has completed.""" + """Query status with one the following values: + + - `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`: + Query has been cancelled by the user. - `FAILED`: Query has failed. - `FINISHED`: Query has + completed.""" user_id: Optional[int] = None """The ID of the user who ran the query.""" @@ -2541,8 +3588,6 @@ class QueryInfo: def as_dict(self) -> dict: """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_subscribe_to_live_query is not None: - body['canSubscribeToLiveQuery'] = self.can_subscribe_to_live_query if self.channel_used: body['channel_used'] = self.channel_used.as_dict() if self.duration is not None: body['duration'] = self.duration if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id @@ -2556,6 +3601,7 @@ def as_dict(self) -> dict: if self.plans_state is not None: body['plans_state'] = self.plans_state.value if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms if self.query_id is not None: body['query_id'] = self.query_id + if self.query_source: body['query_source'] = self.query_source.as_dict() if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms if self.query_text is not None: body['query_text'] = self.query_text if self.rows_produced is not None: 
body['rows_produced'] = self.rows_produced @@ -2570,8 +3616,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryInfo: """Deserializes the QueryInfo from a dictionary.""" - return cls(can_subscribe_to_live_query=d.get('canSubscribeToLiveQuery', None), - channel_used=_from_dict(d, 'channel_used', ChannelInfo), + return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo), duration=d.get('duration', None), endpoint_id=d.get('endpoint_id', None), error_message=d.get('error_message', None), @@ -2584,6 +3629,7 @@ def from_dict(cls, d: Dict[str, any]) -> QueryInfo: plans_state=_enum(d, 'plans_state', PlansState), query_end_time_ms=d.get('query_end_time_ms', None), query_id=d.get('query_id', None), + query_source=_from_dict(d, 'query_source', QuerySource), query_start_time_ms=d.get('query_start_time_ms', None), query_text=d.get('query_text', None), rows_produced=d.get('rows_produced', None), @@ -2606,7 +3652,7 @@ class QueryList: page_size: Optional[int] = None """The number of queries per page.""" - results: Optional[List[Query]] = None + results: Optional[List[LegacyQuery]] = None """List of queries returned.""" def as_dict(self) -> dict: @@ -2624,12 +3670,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryList: return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), - results=_repeated_dict(d, 'results', Query)) + results=_repeated_dict(d, 'results', LegacyQuery)) @dataclass class QueryMetrics: - """Metrics about query execution.""" + """A query metric that encapsulates a set of measurements for a single query. 
Metrics come from the + driver and are stored in the history service database.""" compilation_time_ms: Optional[int] = None """Time spent loading metadata and optimizing the query, in milliseconds.""" @@ -2637,9 +3684,6 @@ class QueryMetrics: execution_time_ms: Optional[int] = None """Time spent executing the query, in milliseconds.""" - metadata_time_ms: Optional[int] = None - """Reserved for internal use.""" - network_sent_bytes: Optional[int] = None """Total amount of data sent over the network between executor nodes during shuffle, in bytes.""" @@ -2650,9 +3694,6 @@ class QueryMetrics: photon_total_time_ms: Optional[int] = None """Total execution time for all individual Photon query engine tasks in the query, in milliseconds.""" - planning_time_ms: Optional[int] = None - """Reserved for internal use.""" - provisioning_queue_start_timestamp: Optional[int] = None """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the warehouse. This field is optional and will not appear if the query skipped the provisioning @@ -2667,9 +3708,6 @@ class QueryMetrics: query_compilation_start_timestamp: Optional[int] = None """Timestamp of when the underlying compute started compilation of the query.""" - query_execution_time_ms: Optional[int] = None - """Reserved for internal use.""" - read_bytes: Optional[int] = None """Total size of data read by the query, in bytes.""" @@ -2677,7 +3715,7 @@ class QueryMetrics: """Size of persistent data read from the cache, in bytes.""" read_files_count: Optional[int] = None - """Number of files read after pruning.""" + """Number of files read after pruning""" read_partitions_count: Optional[int] = None """Number of partitions read after pruning.""" @@ -2689,7 +3727,7 @@ class QueryMetrics: """Time spent fetching the query results after the execution finished, in milliseconds.""" result_from_cache: Optional[bool] = None - """true if the query result was fetched from cache, false otherwise.""" + """`true` if the 
query result was fetched from cache, `false` otherwise.""" rows_produced_count: Optional[int] = None """Total number of rows returned by the query.""" @@ -2714,20 +3752,16 @@ def as_dict(self) -> dict: body = {} if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms - if self.metadata_time_ms is not None: body['metadata_time_ms'] = self.metadata_time_ms if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms - if self.planning_time_ms is not None: body['planning_time_ms'] = self.planning_time_ms if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp - if self.query_execution_time_ms is not None: - body['query_execution_time_ms'] = self.query_execution_time_ms if self.read_bytes is not None: body['read_bytes'] = self.read_bytes if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes if self.read_files_count is not None: body['read_files_count'] = self.read_files_count @@ -2748,16 +3782,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryMetrics: """Deserializes the QueryMetrics from a dictionary.""" return cls(compilation_time_ms=d.get('compilation_time_ms', None), execution_time_ms=d.get('execution_time_ms', None), - 
metadata_time_ms=d.get('metadata_time_ms', None), network_sent_bytes=d.get('network_sent_bytes', None), overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None), photon_total_time_ms=d.get('photon_total_time_ms', None), - planning_time_ms=d.get('planning_time_ms', None), provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None), pruned_bytes=d.get('pruned_bytes', None), pruned_files_count=d.get('pruned_files_count', None), query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None), - query_execution_time_ms=d.get('query_execution_time_ms', None), read_bytes=d.get('read_bytes', None), read_cache_bytes=d.get('read_cache_bytes', None), read_files_count=d.get('read_files_count', None), @@ -2805,6 +3836,59 @@ def from_dict(cls, d: Dict[str, any]) -> QueryOptions: schema=d.get('schema', None)) +@dataclass +class QueryParameter: + date_range_value: Optional[DateRangeValue] = None + """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or + `date_range_value`.""" + + date_value: Optional[DateValue] = None + """Date query parameter value. 
Can only specify one of `dynamic_date_value` or `date_value`.""" + + enum_value: Optional[EnumValue] = None + """Dropdown query parameter value.""" + + name: Optional[str] = None + """Literal parameter marker that appears between double curly braces in the query text.""" + + numeric_value: Optional[NumericValue] = None + """Numeric query parameter value.""" + + query_backed_value: Optional[QueryBackedValue] = None + """Query-based dropdown query parameter value.""" + + text_value: Optional[TextValue] = None + """Text query parameter value.""" + + title: Optional[str] = None + """Text displayed in the user-facing parameter widget in the UI.""" + + def as_dict(self) -> dict: + """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.date_value: body['date_value'] = self.date_value.as_dict() + if self.enum_value: body['enum_value'] = self.enum_value.as_dict() + if self.name is not None: body['name'] = self.name + if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict() + if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict() + if self.text_value: body['text_value'] = self.text_value.as_dict() + if self.title is not None: body['title'] = self.title + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryParameter: + """Deserializes the QueryParameter from a dictionary.""" + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue), + date_value=_from_dict(d, 'date_value', DateValue), + enum_value=_from_dict(d, 'enum_value', EnumValue), + name=d.get('name', None), + numeric_value=_from_dict(d, 'numeric_value', NumericValue), + query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue), + text_value=_from_dict(d, 'text_value', TextValue), + title=d.get('title', None)) + + @dataclass class QueryPostContent: 
data_source_id: Optional[str] = None @@ -2862,8 +3946,187 @@ def from_dict(cls, d: Dict[str, any]) -> QueryPostContent: tags=d.get('tags', None)) +@dataclass +class QuerySource: + alert_id: Optional[str] = None + """UUID""" + + client_call_context: Optional[ClientCallContext] = None + """Client code that triggered the request""" + + command_id: Optional[str] = None + """Id associated with a notebook cell""" + + command_run_id: Optional[str] = None + """Id associated with a notebook run or execution""" + + dashboard_id: Optional[str] = None + """UUID""" + + dashboard_v3_id: Optional[str] = None + """UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id)""" + + driver_info: Optional[QuerySourceDriverInfo] = None + + entry_point: Optional[QuerySourceEntryPoint] = None + """Spark service that received and processed the query""" + + genie_space_id: Optional[str] = None + """UUID for Genie space""" + + is_cloud_fetch: Optional[bool] = None + + is_databricks_sql_exec_api: Optional[bool] = None + + job_id: Optional[str] = None + + job_managed_by: Optional[QuerySourceJobManager] = None + """With background compute, jobs can be managed by different internal teams. 
When not specified, + not a background compute job When specified and the value is not JOBS, it is a background + compute job""" + + notebook_id: Optional[str] = None + + pipeline_id: Optional[str] = None + """Id associated with a DLT pipeline""" + + pipeline_update_id: Optional[str] = None + """Id associated with a DLT update""" + + query_tags: Optional[str] = None + """String provided by a customer that'll help them identify the query""" + + run_id: Optional[str] = None + """Id associated with a job run or execution""" + + runnable_command_id: Optional[str] = None + """Id associated with a notebook cell run or execution""" + + scheduled_by: Optional[QuerySourceTrigger] = None + + serverless_channel_info: Optional[ServerlessChannelInfo] = None + + source_query_id: Optional[str] = None + """UUID""" + + def as_dict(self) -> dict: + """Serializes the QuerySource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.client_call_context: body['client_call_context'] = self.client_call_context.as_dict() + if self.command_id is not None: body['command_id'] = self.command_id + if self.command_run_id is not None: body['command_run_id'] = self.command_run_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_v3_id is not None: body['dashboard_v3_id'] = self.dashboard_v3_id + if self.driver_info: body['driver_info'] = self.driver_info.as_dict() + if self.entry_point is not None: body['entry_point'] = self.entry_point.value + if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id + if self.is_cloud_fetch is not None: body['is_cloud_fetch'] = self.is_cloud_fetch + if self.is_databricks_sql_exec_api is not None: + body['is_databricks_sql_exec_api'] = self.is_databricks_sql_exec_api + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_managed_by is not None: body['job_managed_by'] = 
self.job_managed_by.value + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_update_id is not None: body['pipeline_update_id'] = self.pipeline_update_id + if self.query_tags is not None: body['query_tags'] = self.query_tags + if self.run_id is not None: body['run_id'] = self.run_id + if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id + if self.scheduled_by is not None: body['scheduled_by'] = self.scheduled_by.value + if self.serverless_channel_info: + body['serverless_channel_info'] = self.serverless_channel_info.as_dict() + if self.source_query_id is not None: body['source_query_id'] = self.source_query_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySource: + """Deserializes the QuerySource from a dictionary.""" + return cls(alert_id=d.get('alert_id', None), + client_call_context=_from_dict(d, 'client_call_context', ClientCallContext), + command_id=d.get('command_id', None), + command_run_id=d.get('command_run_id', None), + dashboard_id=d.get('dashboard_id', None), + dashboard_v3_id=d.get('dashboard_v3_id', None), + driver_info=_from_dict(d, 'driver_info', QuerySourceDriverInfo), + entry_point=_enum(d, 'entry_point', QuerySourceEntryPoint), + genie_space_id=d.get('genie_space_id', None), + is_cloud_fetch=d.get('is_cloud_fetch', None), + is_databricks_sql_exec_api=d.get('is_databricks_sql_exec_api', None), + job_id=d.get('job_id', None), + job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager), + notebook_id=d.get('notebook_id', None), + pipeline_id=d.get('pipeline_id', None), + pipeline_update_id=d.get('pipeline_update_id', None), + query_tags=d.get('query_tags', None), + run_id=d.get('run_id', None), + runnable_command_id=d.get('runnable_command_id', None), + scheduled_by=_enum(d, 'scheduled_by', QuerySourceTrigger), + serverless_channel_info=_from_dict(d, 
'serverless_channel_info', ServerlessChannelInfo), + source_query_id=d.get('source_query_id', None)) + + +@dataclass +class QuerySourceDriverInfo: + bi_tool_entry: Optional[str] = None + + driver_name: Optional[str] = None + + simba_branding_vendor: Optional[str] = None + + version_number: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the QuerySourceDriverInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bi_tool_entry is not None: body['bi_tool_entry'] = self.bi_tool_entry + if self.driver_name is not None: body['driver_name'] = self.driver_name + if self.simba_branding_vendor is not None: body['simba_branding_vendor'] = self.simba_branding_vendor + if self.version_number is not None: body['version_number'] = self.version_number + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySourceDriverInfo: + """Deserializes the QuerySourceDriverInfo from a dictionary.""" + return cls(bi_tool_entry=d.get('bi_tool_entry', None), + driver_name=d.get('driver_name', None), + simba_branding_vendor=d.get('simba_branding_vendor', None), + version_number=d.get('version_number', None)) + + +class QuerySourceEntryPoint(Enum): + """Spark service that received and processed the query""" + + DLT = 'DLT' + SPARK_CONNECT = 'SPARK_CONNECT' + THRIFT_SERVER = 'THRIFT_SERVER' + + +class QuerySourceJobManager(Enum): + """Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to + accommodate JOB_MANAGER_UNSPECIFIED""" + + APP_SYSTEM_TABLE = 'APP_SYSTEM_TABLE' + AUTOML = 'AUTOML' + AUTO_MAINTENANCE = 'AUTO_MAINTENANCE' + CLEAN_ROOMS = 'CLEAN_ROOMS' + DATA_MONITORING = 'DATA_MONITORING' + DATA_SHARING = 'DATA_SHARING' + ENCRYPTION = 'ENCRYPTION' + FABRIC_CRAWLER = 'FABRIC_CRAWLER' + JOBS = 'JOBS' + LAKEVIEW = 'LAKEVIEW' + MANAGED_RAG = 'MANAGED_RAG' + SCHEDULED_MV_REFRESH = 'SCHEDULED_MV_REFRESH' + TESTING = 'TESTING' + + +class QuerySourceTrigger(Enum): + + MANUAL = 'MANUAL' 
+ SCHEDULED = 'SCHEDULED' + + class QueryStatementType(Enum): - """Type of statement for this query""" ALTER = 'ALTER' ANALYZE = 'ANALYZE' @@ -2890,15 +4153,16 @@ class QueryStatementType(Enum): class QueryStatus(Enum): - """Query status with one the following values: * `QUEUED`: Query has been received and queued. * - `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: - Query has failed. * `FINISHED`: Query has completed.""" + """Statuses which are also used by OperationStatus in runtime""" CANCELED = 'CANCELED' + COMPILED = 'COMPILED' + COMPILING = 'COMPILING' FAILED = 'FAILED' FINISHED = 'FINISHED' QUEUED = 'QUEUED' RUNNING = 'RUNNING' + STARTED = 'STARTED' @dataclass @@ -3071,6 +4335,12 @@ def from_dict(cls, d: Dict[str, any]) -> ResultSchema: return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) +class RunAsMode(Enum): + + OWNER = 'OWNER' + VIEWER = 'VIEWER' + + class RunAsRole(Enum): """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" @@ -3079,6 +4349,23 @@ class RunAsRole(Enum): VIEWER = 'viewer' +@dataclass +class ServerlessChannelInfo: + name: Optional[ChannelName] = None + """Name of the Channel""" + + def as_dict(self) -> dict: + """Serializes the ServerlessChannelInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ServerlessChannelInfo: + """Deserializes the ServerlessChannelInfo from a dictionary.""" + return cls(name=_enum(d, 'name', ChannelName)) + + @dataclass class ServiceError: error_code: Optional[ServiceErrorCode] = None @@ -3292,9 +4579,46 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem: - """Deserializes the StatementParameterListItem from a dictionary.""" - return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) + def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem: + """Deserializes the StatementParameterListItem from a dictionary.""" + return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) + + +@dataclass +class StatementResponse: + manifest: Optional[ResultManifest] = None + """The result manifest provides schema and metadata for the result set.""" + + result: Optional[ResultData] = None + """Contains the result data of a single chunk when using `INLINE` disposition. When using + `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned + URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the + `external_links` array prepares the API to return multiple links in a single response. 
Currently + only a single link is returned.)""" + + statement_id: Optional[str] = None + """The statement ID is returned upon successfully submitting a SQL statement, and is a required + reference for all subsequent calls.""" + + status: Optional[StatementStatus] = None + """The status response includes execution state and if relevant, error information.""" + + def as_dict(self) -> dict: + """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.manifest: body['manifest'] = self.manifest.as_dict() + if self.result: body['result'] = self.result.as_dict() + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.status: body['status'] = self.status.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> StatementResponse: + """Deserializes the StatementResponse from a dictionary.""" + return cls(manifest=_from_dict(d, 'manifest', ResultManifest), + result=_from_dict(d, 'result', ResultData), + statement_id=d.get('statement_id', None), + status=_from_dict(d, 'status', StatementStatus)) class StatementState(Enum): @@ -3502,13 +4826,29 @@ class TerminationReasonType(Enum): SUCCESS = 'SUCCESS' +@dataclass +class TextValue: + value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the TextValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TextValue: + """Deserializes the TextValue from a dictionary.""" + return cls(value=d.get('value', None)) + + @dataclass class TimeRange: end_time_ms: Optional[int] = None - """Limit results to queries that started before this time.""" + """The end time in milliseconds.""" start_time_ms: Optional[int] = None - """Limit results to queries that started after this time.""" + """The start time in milliseconds.""" def as_dict(self) -> dict: 
"""Serializes the TimeRange into a dictionary suitable for use as a JSON request body.""" @@ -3540,6 +4880,179 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId: return cls(new_owner=d.get('new_owner', None)) +@dataclass +class UpdateAlertRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of + the setting payload will be updated. The field mask needs to be supplied as single string. To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + alert: Optional[UpdateAlertRequestAlert] = None + + id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert: body['alert'] = self.alert.as_dict() + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest: + """Deserializes the UpdateAlertRequest from a dictionary.""" + return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert), + id=d.get('id', None), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateAlertRequestAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. 
+ + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + display_name: Optional[str] = None + """The display name of the alert.""" + + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert: + """Deserializes the UpdateAlertRequestAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), + owner_user_name=d.get('owner_user_name', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None)) + + +@dataclass +class UpdateQueryRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. 
Field mask specifies which fields of + the setting payload will be updated. The field mask needs to be supplied as single string. To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + id: Optional[str] = None + + query: Optional[UpdateQueryRequestQuery] = None + + def as_dict(self) -> dict: + """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.query: body['query'] = self.query.as_dict() + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest: + """Deserializes the UpdateQueryRequest from a dictionary.""" + return cls(id=d.get('id', None), + query=_from_dict(d, 'query', UpdateQueryRequestQuery), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateQueryRequestQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse 
attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery: + """Deserializes the UpdateQueryRequestQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + warehouse_id=d.get('warehouse_id', None)) + + @dataclass class UpdateResponse: @@ -3554,6 +5067,67 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: return cls() +@dataclass +class UpdateVisualizationRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of + the setting payload will be updated. 
The field mask needs to be supplied as single string. To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + id: Optional[str] = None + + visualization: Optional[UpdateVisualizationRequestVisualization] = None + + def as_dict(self) -> dict: + """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.visualization: body['visualization'] = self.visualization.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest: + """Deserializes the UpdateVisualizationRequest from a dictionary.""" + return cls(id=d.get('id', None), + update_mask=d.get('update_mask', None), + visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization)) + + +@dataclass +class UpdateVisualizationRequestVisualization: + display_name: Optional[str] = None + """The display name of the visualization.""" + + serialized_options: Optional[str] = None + """The visualization options varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying visualization options directly.""" + + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. 
Databricks does not recommend modifying the visualization query plan directly.""" + + type: Optional[str] = None + """The type of visualization: counter, table, funnel, and so on.""" + + def as_dict(self) -> dict: + """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization: + """Deserializes the UpdateVisualizationRequestVisualization from a dictionary.""" + return cls(display_name=d.get('display_name', None), + serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), + type=d.get('type', None)) + + @dataclass class User: email: Optional[str] = None @@ -3578,57 +5152,56 @@ def from_dict(cls, d: Dict[str, any]) -> User: @dataclass class Visualization: - """The visualization description API changes frequently and is unsupported. You can duplicate a - visualization by copying description objects received _from the API_ and then using them to - create a new one with a POST request to the same endpoint. Databricks does not recommend - constructing ad-hoc visualizations entirely in JSON.""" - - created_at: Optional[str] = None + create_time: Optional[str] = None + """The timestamp indicating when the visualization was created.""" - description: Optional[str] = None - """A short description of this visualization. 
This is not displayed in the UI.""" + display_name: Optional[str] = None + """The display name of the visualization.""" id: Optional[str] = None - """The UUID for this visualization.""" + """UUID identifying the visualization.""" - name: Optional[str] = None - """The name of the visualization that appears on dashboards and the query screen.""" + query_id: Optional[str] = None + """UUID of the query that the visualization is attached to.""" - options: Optional[Any] = None - """The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON.""" + serialized_options: Optional[str] = None + """The visualization options varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying visualization options directly.""" - query: Optional[Query] = None + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. 
Databricks does not recommend modifying the visualization query plan directly.""" type: Optional[str] = None - """The type of visualization: chart, table, pivot table, and so on.""" + """The type of visualization: counter, table, funnel, and so on.""" - updated_at: Optional[str] = None + update_time: Optional[str] = None + """The timestamp indicating when the visualization was updated.""" def as_dict(self) -> dict: """Serializes the Visualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.description is not None: body['description'] = self.description + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query: body['query'] = self.query.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan if self.type is not None: body['type'] = self.type - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Visualization: """Deserializes the Visualization from a dictionary.""" - return cls(created_at=d.get('created_at', None), - description=d.get('description', None), + return cls(create_time=d.get('create_time', None), + display_name=d.get('display_name', None), id=d.get('id', None), - name=d.get('name', None), - options=d.get('options', None), - query=_from_dict(d, 'query', Query), + query_id=d.get('query_id', None), + 
serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None), - updated_at=d.get('updated_at', None)) + update_time=d.get('update_time', None)) @dataclass @@ -3843,7 +5416,7 @@ class Widget: options: Optional[WidgetOptions] = None - visualization: Optional[Visualization] = None + visualization: Optional[LegacyVisualization] = None """The visualization description API changes frequently and is unsupported. You can duplicate a visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint. Databricks does not recommend @@ -3866,7 +5439,7 @@ def from_dict(cls, d: Dict[str, any]) -> Widget: """Deserializes the Widget from a dictionary.""" return cls(id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), - visualization=_from_dict(d, 'visualization', Visualization), + visualization=_from_dict(d, 'visualization', LegacyVisualization), width=d.get('width', None)) @@ -3919,55 +5492,161 @@ def from_dict(cls, d: Dict[str, any]) -> WidgetOptions: updated_at=d.get('updated_at', None)) -@dataclass -class WidgetPosition: - """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is - unsupported.""" +@dataclass +class WidgetPosition: + """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is + unsupported.""" + + auto_height: Optional[bool] = None + """reserved for internal use""" + + col: Optional[int] = None + """column in the dashboard grid. Values start with 0""" + + row: Optional[int] = None + """row in the dashboard grid. 
Values start with 0""" + + size_x: Optional[int] = None + """width of the widget measured in dashboard grid cells""" + + size_y: Optional[int] = None + """height of the widget measured in dashboard grid cells""" + + def as_dict(self) -> dict: + """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.auto_height is not None: body['autoHeight'] = self.auto_height + if self.col is not None: body['col'] = self.col + if self.row is not None: body['row'] = self.row + if self.size_x is not None: body['sizeX'] = self.size_x + if self.size_y is not None: body['sizeY'] = self.size_y + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> WidgetPosition: + """Deserializes the WidgetPosition from a dictionary.""" + return cls(auto_height=d.get('autoHeight', None), + col=d.get('col', None), + row=d.get('row', None), + size_x=d.get('sizeX', None), + size_y=d.get('sizeY', None)) + + +class AlertsAPI: + """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that + periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or + notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of + the Jobs API, e.g. :method:jobs/create.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert: + """Create an alert. + + Creates an alert. + + :param alert: :class:`CreateAlertRequestAlert` (optional) + + :returns: :class:`Alert` + """ + body = {} + if alert is not None: body['alert'] = alert.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/alerts', body=body, headers=headers) + return Alert.from_dict(res) + + def delete(self, id: str): + """Delete an alert. + + Moves an alert to the trash. 
Trashed alerts immediately disappear from searches and list views, and + can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently + deleted after 30 days. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers) - auto_height: Optional[bool] = None - """reserved for internal use""" + def get(self, id: str) -> Alert: + """Get an alert. + + Gets an alert. + + :param id: str + + :returns: :class:`Alert` + """ - col: Optional[int] = None - """column in the dashboard grid. Values start with 0""" + headers = {'Accept': 'application/json', } - row: Optional[int] = None - """row in the dashboard grid. Values start with 0""" + res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers) + return Alert.from_dict(res) - size_x: Optional[int] = None - """width of the widget measured in dashboard grid cells""" + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]: + """List alerts. + + Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. 
+ + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListAlertsResponseAlert` + """ - size_y: Optional[int] = None - """height of the widget measured in dashboard grid cells""" + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } - def as_dict(self) -> dict: - """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body.""" + while True: + json = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListAlertsResponseAlert.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert: + """Update an alert. + + Updates an alert. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). 
+ :param alert: :class:`UpdateAlertRequestAlert` (optional) + + :returns: :class:`Alert` + """ body = {} - if self.auto_height is not None: body['autoHeight'] = self.auto_height - if self.col is not None: body['col'] = self.col - if self.row is not None: body['row'] = self.row - if self.size_x is not None: body['sizeX'] = self.size_x - if self.size_y is not None: body['sizeY'] = self.size_y - return body + if alert is not None: body['alert'] = alert.as_dict() + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - @classmethod - def from_dict(cls, d: Dict[str, any]) -> WidgetPosition: - """Deserializes the WidgetPosition from a dictionary.""" - return cls(auto_height=d.get('autoHeight', None), - col=d.get('col', None), - row=d.get('row', None), - size_x=d.get('sizeX', None), - size_y=d.get('sizeY', None)) + res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=body, headers=headers) + return Alert.from_dict(res) -class AlertsAPI: +class AlertsLegacyAPI: """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. 
Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -3978,15 +5657,14 @@ def create(self, query_id: str, *, parent: Optional[str] = None, - rearm: Optional[int] = None) -> Alert: + rearm: Optional[int] = None) -> LegacyAlert: """Create an alert. Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create + instead. :param name: str Name of the alert. @@ -4000,7 +5678,7 @@ def create(self, Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - :returns: :class:`Alert` + :returns: :class:`LegacyAlert` """ body = {} if name is not None: body['name'] = name @@ -4011,7 +5689,7 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers) - return Alert.from_dict(res) + return LegacyAlert.from_dict(res) def delete(self, alert_id: str): """Delete an alert. @@ -4019,9 +5697,8 @@ def delete(self, alert_id: str): Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete + instead. :param alert_id: str @@ -4032,41 +5709,39 @@ def delete(self, alert_id: str): self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers) - def get(self, alert_id: str) -> Alert: + def get(self, alert_id: str) -> LegacyAlert: """Get an alert. Gets an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get + instead. :param alert_id: str - :returns: :class:`Alert` + :returns: :class:`LegacyAlert` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers) - return Alert.from_dict(res) + return LegacyAlert.from_dict(res) - def list(self) -> Iterator[Alert]: + def list(self) -> Iterator[LegacyAlert]: """Get alerts. Gets a list of alerts. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list + instead. 
- :returns: Iterator over :class:`Alert` + :returns: Iterator over :class:`LegacyAlert` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers) - return [Alert.from_dict(v) for v in res] + return [LegacyAlert.from_dict(v) for v in res] def update(self, alert_id: str, @@ -4079,9 +5754,8 @@ def update(self, Updates an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update + instead. :param alert_id: str :param name: str @@ -4381,9 +6055,7 @@ class DataSourcesAPI: advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4395,9 +6067,8 @@ def list(self) -> Iterator[DataSource]: API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list + instead. 
:returns: Iterator over :class:`DataSource` """ @@ -4421,9 +6092,7 @@ class DbsqlPermissionsAPI: - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4433,10 +6102,6 @@ def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: Gets a JSON representation of the access control list (ACL) for a specified object. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str @@ -4462,10 +6127,6 @@ def set(self, Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str @@ -4494,9 +6155,8 @@ def transfer_ownership(self, Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use + :method:queries/update and :method:alerts/update respectively instead. :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. @@ -4519,13 +6179,151 @@ def transfer_ownership(self, class QueriesAPI: + """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that + includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be + scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query: + """Create a query. + + Creates a query. + + :param query: :class:`CreateQueryRequestQuery` (optional) + + :returns: :class:`Query` + """ + body = {} + if query is not None: body['query'] = query.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers) + return Query.from_dict(res) + + def delete(self, id: str): + """Delete a query. + + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is + permanently deleted after 30 days. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers) + + def get(self, id: str) -> Query: + """Get a query. + + Gets a query. 
+ + :param id: str + + :returns: :class:`Query` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers) + return Query.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]: + """List queries. + + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListQueryObjectsResponseQuery.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_visualizations(self, + id: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[Visualization]: + """List visualizations on a query. + + Gets a list of visualizations on a query. 
+ + :param id: str + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Visualization` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/sql/queries/{id}/visualizations', + query=query, + headers=headers) + if 'results' in json: + for v in json['results']: + yield Visualization.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query: + """Update a query. + + Updates a query. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param query: :class:`UpdateQueryRequestQuery` (optional) + + :returns: :class:`Query` + """ + body = {} + if query is not None: body['query'] = query.as_dict() + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers) + return Query.from_dict(res) + + +class QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4539,7 +6337,7 @@ def create(self, parent: Optional[str] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, - tags: Optional[List[str]] = None) -> Query: + tags: Optional[List[str]] = None) -> LegacyQuery: """Create a new query definition. Creates a new query definition. Queries created with this endpoint belong to the authenticated user @@ -4551,9 +6349,8 @@ def create(self, **Note**: You cannot add a visualization until you create the query. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create + instead. :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -4577,7 +6374,7 @@ def create(self, viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ body = {} if data_source_id is not None: body['data_source_id'] = data_source_id @@ -4591,7 +6388,7 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) def delete(self, query_id: str): """Delete a query. @@ -4599,9 +6396,8 @@ def delete(self, query_id: str): Moves a query to the trash. 
Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete + instead. :param query_id: str @@ -4612,32 +6408,31 @@ def delete(self, query_id: str): self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers) - def get(self, query_id: str) -> Query: + def get(self, query_id: str) -> LegacyQuery: """Get a query definition. Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get + instead. :param query_id: str - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) def list(self, *, order: Optional[str] = None, page: Optional[int] = None, page_size: Optional[int] = None, - q: Optional[str] = None) -> Iterator[Query]: + q: Optional[str] = None) -> Iterator[LegacyQuery]: """Get a list of queries. Gets a list of queries. Optionally, this list can be filtered by a search term. @@ -4645,9 +6440,8 @@ def list(self, **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. 
- **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list + instead. :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order @@ -4670,7 +6464,7 @@ def list(self, :param q: str (optional) Full text search term - :returns: Iterator over :class:`Query` + :returns: Iterator over :class:`LegacyQuery` """ query = {} @@ -4691,7 +6485,7 @@ def list(self, if i in seen: continue seen.add(i) - yield Query.from_dict(v) + yield LegacyQuery.from_dict(v) if 'results' not in json or not json['results']: return query['page'] += 1 @@ -4702,9 +6496,7 @@ def restore(self, query_id: str): Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. :param query_id: str @@ -4724,16 +6516,15 @@ def update(self, options: Optional[Any] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, - tags: Optional[List[str]] = None) -> Query: + tags: Optional[List[str]] = None) -> LegacyQuery: """Change a query definition. Modify this query definition. **Note**: You cannot undo this operation. - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update + instead. :param query_id: str :param data_source_id: str (optional) @@ -4756,7 +6547,7 @@ def update(self, viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ body = {} if data_source_id is not None: body['data_source_id'] = data_source_id @@ -4769,11 +6560,12 @@ def update(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) class QueryHistoryAPI: - """Access the history of queries through SQL warehouses.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless + compute, and DLT.""" def __init__(self, api_client): self._api = api_client @@ -4781,49 +6573,109 @@ def __init__(self, api_client): def list(self, *, filter_by: Optional[QueryFilter] = None, - include_metrics: Optional[bool] = None, max_results: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator[QueryInfo]: + page_token: Optional[str] = None) -> ListQueriesResponse: """List Queries. - List the history of queries through SQL warehouses. + List the history of queries through SQL warehouses, serverless compute, and DLT. - You can filter by user ID, warehouse ID, status, and time range. + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are + returned first (up to max_results in request). The pagination token returned in response can be used + to list subsequent query statuses. 
:param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. - :param include_metrics: bool (optional) - Whether to include metrics about query. :param max_results: int (optional) - Limit the number of results returned in one page. The default is 100. + Limit the number of results returned in one page. Must be less than 1000 and the default is 100. :param page_token: str (optional) A token that can be used to get the next page of results. The token can contains characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by - %2B. + %2B. This field is optional. - :returns: Iterator over :class:`QueryInfo` + :returns: :class:`ListQueriesResponse` """ query = {} if filter_by is not None: query['filter_by'] = filter_by.as_dict() - if include_metrics is not None: query['include_metrics'] = include_metrics if max_results is not None: query['max_results'] = max_results if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - while True: - json = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers) - if 'res' in json: - for v in json['res']: - yield QueryInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] + res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers) + return ListQueriesResponse.from_dict(res) class QueryVisualizationsAPI: + """This is an evolving API that facilitates the addition and removal of visualizations from existing queries + in the Databricks Workspace. Data structures can change over time.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: + """Add a visualization to a query. + + Adds a visualization to a query. 
+ + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) + + :returns: :class:`Visualization` + """ + body = {} + if visualization is not None: body['visualization'] = visualization.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers) + return Visualization.from_dict(res) + + def delete(self, id: str): + """Remove a visualization. + + Removes a visualization. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers) + + def update(self, + id: str, + update_mask: str, + *, + visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization: + """Update a visualization. + + Updates a visualization. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) + + :returns: :class:`Visualization` + """ + body = {} + if update_mask is not None: body['update_mask'] = update_mask + if visualization is not None: body['visualization'] = visualization.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers) + return Visualization.from_dict(res) + + +class QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries - within the Databricks Workspace. Data structures may change over time.""" + within the Databricks Workspace. 
Data structures may change over time. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4834,9 +6686,14 @@ def create(self, options: Any, *, description: Optional[str] = None, - name: Optional[str] = None) -> Visualization: + name: Optional[str] = None) -> LegacyVisualization: """Add visualization to a query. + Creates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/create instead. + :param query_id: str The identifier returned by :method:queries/create :param type: str @@ -4849,7 +6706,7 @@ def create(self, :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. - :returns: :class:`Visualization` + :returns: :class:`LegacyVisualization` """ body = {} if description is not None: body['description'] = description @@ -4860,11 +6717,16 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers) - return Visualization.from_dict(res) + return LegacyVisualization.from_dict(res) def delete(self, id: str): """Remove visualization. + Removes a visualization from the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/delete instead. + :param id: str Widget ID returned by :method:queryvizualisations/create @@ -4882,11 +6744,16 @@ def update(self, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, - query: Optional[Query] = None, + query: Optional[LegacyQuery] = None, type: Optional[str] = None, - updated_at: Optional[str] = None) -> Visualization: + updated_at: Optional[str] = None) -> LegacyVisualization: """Edit existing visualization. + Updates visualization in the query. 
+ + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/update instead. + :param id: str The UUID for this visualization. :param created_at: str (optional) @@ -4897,12 +6764,12 @@ def update(self, :param options: Any (optional) The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON. - :param query: :class:`Query` (optional) + :param query: :class:`LegacyQuery` (optional) :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) - :returns: :class:`Visualization` + :returns: :class:`LegacyVisualization` """ body = {} if created_at is not None: body['created_at'] = created_at @@ -4915,7 +6782,7 @@ def update(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers) - return Visualization.from_dict(res) + return LegacyVisualization.from_dict(res) class StatementExecutionAPI: @@ -5034,7 +6901,7 @@ def execute_statement(self, parameters: Optional[List[StatementParameterListItem]] = None, row_limit: Optional[int] = None, schema: Optional[str] = None, - wait_timeout: Optional[str] = None) -> ExecuteStatementResponse: + wait_timeout: Optional[str] = None) -> StatementResponse: """Execute a SQL statement. :param statement: str @@ -5161,7 +7028,7 @@ def execute_statement(self, the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. 
- :returns: :class:`ExecuteStatementResponse` + :returns: :class:`StatementResponse` """ body = {} if byte_limit is not None: body['byte_limit'] = byte_limit @@ -5178,9 +7045,9 @@ def execute_statement(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers) - return ExecuteStatementResponse.from_dict(res) + return StatementResponse.from_dict(res) - def get_statement(self, statement_id: str) -> GetStatementResponse: + def get_statement(self, statement_id: str) -> StatementResponse: """Get status, manifest, and result first chunk. This request can be used to poll for the statement's status. When the `status.state` field is @@ -5195,13 +7062,13 @@ def get_statement(self, statement_id: str) -> GetStatementResponse: The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - :returns: :class:`GetStatementResponse` + :returns: :class:`StatementResponse` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers) - return GetStatementResponse.from_dict(res) + return StatementResponse.from_dict(res) def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData: """Get result chunk by index. 
diff --git a/tests/test_core.py b/tests/test_core.py index 057147159..cc7926a72 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -345,13 +345,13 @@ def test_shares(config, requests_mock): def test_deletes(config, requests_mock): - requests_mock.delete("http://localhost/api/2.0/preview/sql/alerts/alertid", + requests_mock.delete("http://localhost/api/2.0/sql/alerts/alertId", request_headers={"User-Agent": config.user_agent}, text="null", ) w = WorkspaceClient(config=config) - res = w.alerts.delete(alert_id="alertId") + res = w.alerts.delete(id="alertId") assert requests_mock.call_count == 1 assert requests_mock.called From a2013654f050defa4cdb18a6a2d0f4596a362c2f Mon Sep 17 00:00:00 2001 From: Hector Castejon Diaz Date: Mon, 22 Jul 2024 14:45:22 +0200 Subject: [PATCH 2/2] Fix tests --- tests/integration/test_sql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_sql.py b/tests/integration/test_sql.py index 3e178dc3a..af368609b 100644 --- a/tests/integration/test_sql.py +++ b/tests/integration/test_sql.py @@ -11,5 +11,5 @@ def date_to_ms(date): filter = QueryFilter(query_start_time_range=TimeRange(start_time_ms=date_to_ms('2023-01-01'), end_time_ms=date_to_ms('2023-01-02'))) queries = w.query_history.list(filter_by=filter) - for q in queries: + for q in queries.res: print(q)