diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md
index 19dcaa4c50..9787824a39 100644
--- a/.github/ISSUE_TEMPLATE/feature-request.md
+++ b/.github/ISSUE_TEMPLATE/feature-request.md
@@ -45,4 +45,4 @@ Are there any other GitHub issues, whether open or closed, that are related to t
 
 - #6017
 
--->
\ No newline at end of file
+-->
diff --git a/docs/guides/unity-catalog-azure.md b/docs/guides/unity-catalog-azure.md
index 851ed5dccf..6e55d4f32f 100644
--- a/docs/guides/unity-catalog-azure.md
+++ b/docs/guides/unity-catalog-azure.md
@@ -250,6 +250,7 @@ resource "azurerm_role_assignment" "ext_storage" {
   scope                = azurerm_storage_account.ext_storage.id
   role_definition_name = "Storage Blob Data Contributor"
   principal_id         = azapi_resource.ext_access_connector.identity[0].principal_id
+}
 ```
 
 Then create the [databricks_storage_credential](../resources/storage_credential.md) and [databricks_external_location](../resources/external_location.md) in Unity Catalog.
diff --git a/docs/resources/group_role.md b/docs/resources/group_role.md
index 2fe3d9030c..c2b9b9f61b 100644
--- a/docs/resources/group_role.md
+++ b/docs/resources/group_role.md
@@ -13,8 +13,8 @@ resource "databricks_group" "my_group" {
 }
 
 resource "databricks_group_role" "my_group_role" {
-  group_id   = databricks_group.my_group.id
-  role       = "arn:aws:iam::000000000000:role/my-role"
+  group_id = databricks_group.my_group.id
+  role     = "arn:aws:iam::000000000000:role/my-role"
 }
 ```
 
diff --git a/docs/resources/recipient.md b/docs/resources/recipient.md
index 6da0e2c63c..56ec12ac4c 100644
--- a/docs/resources/recipient.md
+++ b/docs/resources/recipient.md
@@ -16,19 +16,19 @@ authenticate to the sharing server to access data. This is for when the recipien
 
 ```hcl
 resource "random_password" "db2opensharecode" {
-  length   = 16
-  special  = true
+  length  = 16
+  special = true
 }
 
 data "databricks_current_user" "current" {}
 
 resource "databricks_recipient" "db2open" {
-  name = "${data.databricks_current_user.current.alphanumeric}-recipient"
-  comment = "made by terraform"
+  name                = "${data.databricks_current_user.current.alphanumeric}-recipient"
+  comment             = "made by terraform"
   authentication_type = "TOKEN"
-  sharing_code = random_password.db2opensharecode.result
+  sharing_code        = random_password.db2opensharecode.result
   ip_access_list {
-    allowed_ip_addresses = [...] // .. fill in allowed IPv4 addresses (CIDR notation allowed)
+    allowed_ip_addresses = [] // .. fill in allowed IPv4 addresses (CIDR notation allowed)
   }
 }
 ```
@@ -46,16 +46,16 @@ resource "databricks_metastore" "recipient_metastore" {
   name = "recipient"
   storage_root = format("abfss://%s@%s.dfs.core.windows.net/",
     azurerm_storage_account.unity_catalog.name,
-    azurerm_storage_container.unity_catalog.name)
-  delta_sharing_scope = "INTERNAL"
+  azurerm_storage_container.unity_catalog.name)
+  delta_sharing_scope                               = "INTERNAL"
   delta_sharing_recipient_token_lifetime_in_seconds = "60000000"
-  force_destroy = true
+  force_destroy                                     = true
 }
 
 resource "databricks_recipient" "db2db" {
-  name = "${data.databricks_current_user.current.alphanumeric}-recipient"
-  comment = "made by terraform"
-  authentication_type = "DATABRICKS"
+  name                               = "${data.databricks_current_user.current.alphanumeric}-recipient"
+  comment                            = "made by terraform"
+  authentication_type                = "DATABRICKS"
   data_recipient_global_metastore_id = databricks_metastore.recipient_metastore.global_metastore_id
 }
 ```
@@ -75,9 +75,9 @@ The following arguments are required:
 Only one `ip_access_list` blocks is allowed in a recipient. It conflicts with authentication type DATABRICKS.
 
 ```hcl
-  ip_access_list {
-    allowed_ip_addresses = ["0.0.0.0/0"]
-  }
+ip_access_list {
+  allowed_ip_addresses = ["0.0.0.0/0"]
+}
 ```
 
 Arguments for the `ip_access_list` block are:
diff --git a/docs/resources/sql_query.md b/docs/resources/sql_query.md
index 25b3142850..32a043e52c 100644
--- a/docs/resources/sql_query.md
+++ b/docs/resources/sql_query.md
@@ -15,8 +15,14 @@ A query may have one or more [visualizations](sql_visualization.md).
 resource "databricks_sql_query" "q1" {
   data_source_id = databricks_sql_endpoint.example.data_source_id
   name           = "My Query Name"
-  query          = "SELECT {{ p1 }} AS p1, 2 as p2"
-  run_as_role    = "viewer"
+  query          = <<EOT
+                        SELECT {{ p1 }} AS p1
+                        , 2 as p2
+                        , 3 as p3
+                        WHERE p2 > date '{{ p3 }}'
+                      EOT
+
+  run_as_role = "viewer"
 
   schedule {
     continuous {
@@ -32,6 +38,31 @@
     }
   }
 
+  parameter {
+    name  = "p2"
+    title = "Title for p2"
+    enum {
+      options = ["default", "foo", "bar"]
+      value   = "default"
+      // passes to sql query as string `"foo", "bar"` if foo and bar are both selected in the front end
+      multiple {
+        prefix    = "\""
+        suffix    = "\""
+        separator = ","
+      }
+
+    }
+  }
+
+  parameter {
+    name  = "p3"
+    title = "Title for p3"
+    date {
+      value = "2022-01-01"
+    }
+  }
+
   tags = [
     "t1",
     "t2",
@@ -66,6 +97,10 @@ You can import a `databricks_sql_query` resource with ID like the following:
 $ terraform import databricks_sql_query.this <query-id>
 ```
 
+## Troubleshooting
+
+If you see `Error: cannot create sql query: Internal Server Error` during `terraform apply`, double-check that you are using the correct [`data_source_id`](sql_endpoint.md).
+
 ## Related Resources
 
 The following resources are often used in the same context:
diff --git a/docs/resources/storage_credential.md b/docs/resources/storage_credential.md
index 1329d8017b..73780c5fb1 100644
--- a/docs/resources/storage_credential.md
+++ b/docs/resources/storage_credential.md
@@ -34,7 +34,7 @@ For Azure
 
 ```hcl
 data "azurerm_resource_group" "this" {
-  name     = "example-rg"
+  name = "example-rg"
 }
 
 resource "azapi_resource" "access_connector" {
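For the `data_source_id` troubleshooting note added in the sql_query.md hunk above, a minimal sketch of how a query is typically wired to a SQL warehouse; the resource names, warehouse sizing, and query text here are illustrative assumptions, not part of this diff:

```hcl
resource "databricks_sql_endpoint" "example" {
  name             = "Example warehouse" // illustrative name
  cluster_size     = "Small"
  max_num_clusters = 1
}

resource "databricks_sql_query" "q1" {
  // Reference the warehouse's exported data_source_id (not its resource id);
  // pointing at the wrong identifier is a common cause of the
  // "cannot create sql query: Internal Server Error" mentioned above.
  data_source_id = databricks_sql_endpoint.example.data_source_id
  name           = "My Query Name"
  query          = "SELECT 1"
}
```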