diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 7b15f31f10..cc9ec137b2 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -4,6 +4,17 @@ This document is meant to help you migrate your Terraform config to the new newe describe deprecations or breaking changes and help you to change your configuration to keep the same (or similar) behavior across different versions. +## v0.91.0 ➞ v0.92.0 +### snowflake_database new alternatives +As part of the [preparation for v1](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1), we split up the database resource into multiple ones: +- Standard database (in progress) +- Shared database - can be used as `snowflake_shared_database` (used to create databases from externally defined shares) +- Secondary database - can be used as `snowflake_secondary_database` (used to create replicas of databases from external sources) +From now on, please migrate and use the new database resources for their unique use cases. For more information, see the documentation for those resources on the [Terraform Registry](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs). + +The split was done (and will be done for several objects during the refactor) to simplify the resource on maintainability and usage level. +Its purpose was also to divide the resources by their specific purpose rather than cramming every use case of an object into one resource. 
+ ## v0.89.0 ➞ v0.90.0 ### snowflake_table resource changes #### *(behavior change)* Validation to column type added @@ -23,7 +34,7 @@ resource "snowflake_tag_masking_policy_association" "name" { masking_policy_id = snowflake_masking_policy.example_masking_policy.id } ``` - + After ```terraform resource "snowflake_tag_masking_policy_association" "name" { diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md new file mode 100644 index 0000000000..63383d4755 --- /dev/null +++ b/docs/resources/secondary_database.md @@ -0,0 +1,98 @@ +--- +page_title: "snowflake_secondary_database Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see Introduction to database replication across multiple accounts https://docs.snowflake.com/en/user-guide/db-replication-intro. +--- + +# snowflake_secondary_database (Resource) + +A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see [Introduction to database replication across multiple accounts](https://docs.snowflake.com/en/user-guide/db-replication-intro). + +## Example Usage + +```terraform +# 1. Preparing primary database +resource "snowflake_database" "primary" { + provider = primary_account # notice the provider fields + name = "database_name" + replication_configuration { + accounts = ["."] + ignore_edition_check = true + } +} + +# 2. 
Creating secondary database +resource "snowflake_secondary_database" "test" { + provider = secondary_account + name = snowflake_database.primary.name # It's recommended to give a secondary database the same name as its primary database + as_replica_of = "..${snowflake_database.primary.name}" + is_transient = false + + data_retention_time_in_days { + value = 10 + } + + max_data_extension_time_in_days { + value = 20 + } + + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A secondary database" +} +``` + + +## Schema + +### Required + +- `as_replica_of` (String) A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `""."".""`. +- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. + +### Optional + +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `comment` (String) Specifies a comment for the database. +- `data_retention_time_in_days` (Block List, Max: 1) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. 
For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). (see [below for nested schema](#nestedblock--data_retention_time_in_days)) +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `max_data_extension_time_in_days` (Block List, Max: 1) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). (see [below for nested schema](#nestedblock--max_data_extension_time_in_days)) +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. 
You can only set this parameter for tables that use an external Iceberg catalog. +- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). + +### Read-Only + +- `id` (String) The ID of this resource. + + +### Nested Schema for `data_retention_time_in_days` + +Required: + +- `value` (Number) + + + +### Nested Schema for `max_data_extension_time_in_days` + +Required: + +- `value` (Number) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_secondary_database.example 'secondary_database_name' +``` diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md new file mode 100644 index 0000000000..271a1b7e01 --- /dev/null +++ b/docs/resources/shared_database.md @@ -0,0 +1,81 @@ +--- +page_title: "snowflake_shared_database Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see Introduction to Secure Data Sharing https://docs.snowflake.com/en/user-guide/data-sharing-intro. +--- + +# snowflake_shared_database (Resource) + +A shared database creates a database from a share provided by another Snowflake account. 
For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro). + +## Example Usage + +```terraform +# 1. Preparing database to share +resource "snowflake_share" "test" { + provider = primary_account # notice the provider fields + name = "share_name" + accounts = ["."] +} + +resource "snowflake_database" "test" { + provider = primary_account + name = "shared_database" +} + +resource "snowflake_grant_privileges_to_share" "test" { + provider = primary_account + to_share = snowflake_share.test.name + privileges = ["USAGE"] + on_database = snowflake_database.test.name +} + +# 2. Creating shared database +resource "snowflake_shared_database" "test" { + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_database.test.name # shared database should have the same name as the "imported" one + from_share = "..${snowflake_share.test.name}" + is_transient = false + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A shared database" +} +``` + + +## Schema + +### Required + +- `from_share` (String) A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `"".""`. +- `name` (String) Specifies the identifier for the database; must be unique for your account. + +### Optional + +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `comment` (String) Specifies a comment for the database. +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. 
For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. +- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). + +### Read-Only + +- `id` (String) The ID of this resource. 
+ +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_shared_database.example 'shared_database_name' +``` diff --git a/examples/resources/snowflake_secondary_database/import.sh b/examples/resources/snowflake_secondary_database/import.sh new file mode 100644 index 0000000000..f183eac8ac --- /dev/null +++ b/examples/resources/snowflake_secondary_database/import.sh @@ -0,0 +1 @@ +terraform import snowflake_secondary_database.example 'secondary_database_name' diff --git a/examples/resources/snowflake_secondary_database/resource.tf b/examples/resources/snowflake_secondary_database/resource.tf new file mode 100644 index 0000000000..dd606162ef --- /dev/null +++ b/examples/resources/snowflake_secondary_database/resource.tf @@ -0,0 +1,34 @@ +# 1. Preparing primary database +resource "snowflake_database" "primary" { + provider = primary_account # notice the provider fields + name = "database_name" + replication_configuration { + accounts = ["."] + ignore_edition_check = true + } +} + +# 2. 
Creating secondary database +resource "snowflake_secondary_database" "test" { + provider = secondary_account + name = snowflake_database.primary.name # It's recommended to give a secondary database the same name as its primary database + as_replica_of = "..${snowflake_database.primary.name}" + is_transient = false + + data_retention_time_in_days { + value = 10 + } + + max_data_extension_time_in_days { + value = 20 + } + + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A secondary database" +} diff --git a/examples/resources/snowflake_shared_database/import.sh b/examples/resources/snowflake_shared_database/import.sh new file mode 100644 index 0000000000..6cf900566c --- /dev/null +++ b/examples/resources/snowflake_shared_database/import.sh @@ -0,0 +1 @@ +terraform import snowflake_shared_database.example 'shared_database_name' diff --git a/examples/resources/snowflake_shared_database/resource.tf b/examples/resources/snowflake_shared_database/resource.tf new file mode 100644 index 0000000000..7f506bccf9 --- /dev/null +++ b/examples/resources/snowflake_shared_database/resource.tf @@ -0,0 +1,35 @@ +# 1. Preparing database to share +resource "snowflake_share" "test" { + provider = primary_account # notice the provider fields + name = "share_name" + accounts = ["."] +} + +resource "snowflake_database" "test" { + provider = primary_account + name = "shared_database" +} + +resource "snowflake_grant_privileges_to_share" "test" { + provider = primary_account + to_share = snowflake_share.test.name + privileges = ["USAGE"] + on_database = snowflake_database.test.name +} + +# 2. 
Creating shared database +resource "snowflake_shared_database" "test" { + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_database.test.name # shared database should have the same name as the "imported" one + from_share = "..${snowflake_share.test.name}" + is_transient = false + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A shared database" +} diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 5e7b996222..31273ce056 100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -136,12 +136,18 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Schema: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Schemas.ShowByID) }, + resources.SecondaryDatabase: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Databases.ShowByID) + }, resources.Sequence: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Sequences.ShowByID) }, resources.Share: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Shares.ShowByID) }, + resources.SharedDatabase: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Databases.ShowByID) + }, resources.Stage: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Stages.ShowByID) }, diff --git a/pkg/acceptance/helpers/database_client.go b/pkg/acceptance/helpers/database_client.go index 308130d8c8..ad050aff03 100644 --- 
a/pkg/acceptance/helpers/database_client.go +++ b/pkg/acceptance/helpers/database_client.go @@ -25,6 +25,30 @@ func (c *DatabaseClient) client() sdk.Databases { return c.context.client.Databases } +func (c *DatabaseClient) CreatePrimaryDatabase(t *testing.T, enableReplicationTo []sdk.AccountIdentifier) (*sdk.Database, sdk.ExternalObjectIdentifier, func()) { + t.Helper() + ctx := context.Background() + + primaryDatabase, primaryDatabaseCleanup := c.CreateDatabase(t) + + err := c.client().AlterReplication(ctx, primaryDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ + EnableReplication: &sdk.EnableReplication{ + ToAccounts: enableReplicationTo, + IgnoreEditionCheck: sdk.Bool(true), + }, + }) + require.NoError(t, err) + + organizationName, err := c.context.client.ContextFunctions.CurrentOrganizationName(ctx) + require.NoError(t, err) + + accountName, err := c.context.client.ContextFunctions.CurrentAccountName(ctx) + require.NoError(t, err) + + externalPrimaryId := sdk.NewExternalObjectIdentifier(sdk.NewAccountIdentifier(organizationName, accountName), primaryDatabase.ID()) + return primaryDatabase, externalPrimaryId, primaryDatabaseCleanup +} + func (c *DatabaseClient) CreateDatabase(t *testing.T) (*sdk.Database, func()) { t.Helper() return c.CreateDatabaseWithOptions(t, c.ids.RandomAccountObjectIdentifier(), &sdk.CreateDatabaseOptions{}) diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 457f3e0b14..90c3b25a44 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -494,9 +494,11 @@ func getResources() map[string]*schema.Resource { "snowflake_saml_integration": resources.SAMLIntegration(), "snowflake_schema": resources.Schema(), "snowflake_scim_integration": resources.SCIMIntegration(), + "snowflake_secondary_database": resources.SecondaryDatabase(), "snowflake_sequence": resources.Sequence(), "snowflake_session_parameter": resources.SessionParameter(), "snowflake_share": resources.Share(), + "snowflake_shared_database": 
resources.SharedDatabase(), "snowflake_stage": resources.Stage(), "snowflake_storage_integration": resources.StorageIntegration(), "snowflake_stream": resources.Stream(), diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 29f436295e..550836803e 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -28,8 +28,10 @@ const ( Role resource = "snowflake_role" RowAccessPolicy resource = "snowflake_row_access_policy" Schema resource = "snowflake_schema" + SecondaryDatabase resource = "snowflake_secondary_database" Sequence resource = "snowflake_sequence" Share resource = "snowflake_share" + SharedDatabase resource = "snowflake_shared_database" Stage resource = "snowflake_stage" StorageIntegration resource = "snowflake_storage_integration" Stream resource = "snowflake_stream" diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go new file mode 100644 index 0000000000..270766a957 --- /dev/null +++ b/pkg/resources/custom_diffs.go @@ -0,0 +1,49 @@ +package resources + +import ( + "context" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// NestedIntValueAccountObjectComputedIf is NestedValueComputedIf, +// but dedicated for account level objects with integer-typed properties. 
+func NestedIntValueAccountObjectComputedIf(key string, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { + return NestedValueComputedIf( + key, + func(client *sdk.Client) (*sdk.Parameter, error) { + return client.Parameters.ShowAccountParameter(context.Background(), parameter) + }, + func(v any) string { return strconv.Itoa(v.(int)) }, + ) +} + +// NestedValueComputedIf internally calls schema.ResourceDiff.SetNewComputed whenever the inner function returns true. +// It's main purpose was to use it with hierarchical values that are marked with Computed and Optional. Such values should +// be recomputed whenever the value is not in the configuration and the remote value is not equal to the value in state. +func NestedValueComputedIf(key string, showParam func(client *sdk.Client) (*sdk.Parameter, error), valueToString func(v any) string) schema.CustomizeDiffFunc { + return customdiff.ComputedIf(key, func(ctx context.Context, d *schema.ResourceDiff, meta interface{}) bool { + configValue, ok := d.GetRawConfig().AsValueMap()[key] + if ok && len(configValue.AsValueSlice()) == 1 { + return false + } + + client := meta.(*provider.Context).Client + + param, err := showParam(client) + if err != nil { + return false + } + + stateValue := d.Get(key).([]any) + if len(stateValue) != 1 { + return false + } + + return param.Value != valueToString(stateValue[0].(map[string]any)["value"]) + }) +} diff --git a/pkg/resources/custom_diffs_test.go b/pkg/resources/custom_diffs_test.go new file mode 100644 index 0000000000..bc7172dc03 --- /dev/null +++ b/pkg/resources/custom_diffs_test.go @@ -0,0 +1,152 @@ +package resources_test + +import ( + "context" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + 
"github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNestedValueComputedIf(t *testing.T) { + customDiff := resources.NestedValueComputedIf( + "nested_value", + func(client *sdk.Client) (*sdk.Parameter, error) { + return &sdk.Parameter{ + Key: "Parameter", + Value: "snow-value", + }, nil + }, + func(v any) string { return v.(string) }, + ) + providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeString, customDiff) + + t.Run("value set in the configuration and state", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ + "nested_value": cty.ListVal([]cty.Value{ + cty.MapVal(map[string]cty.Value{ + "value": cty.NumberIntVal(123), + }), + }), + }), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": 123, + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set only in the configuration", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ + "nested_value": cty.ListVal([]cty.Value{ + cty.MapVal(map[string]cty.Value{ + "value": cty.NumberIntVal(123), + }), + }), + }), map[string]any{}) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set in the state and not equals with parameter", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": "value-to-change", + }, + }, + }) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set in the state and equals with parameter", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": 
"snow-value", + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) +} + +func TestNestedIntValueAccountObjectComputedIf(t *testing.T) { + providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeInt, resources.NestedIntValueAccountObjectComputedIf("nested_value", sdk.AccountParameterDataRetentionTimeInDays)) + + t.Run("different value than on the Snowflake side", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": 999, // value outside of valid range + }, + }, + }) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("same value as in Snowflake", func(t *testing.T) { + dataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": dataRetentionTimeInDays.Value, + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) +} + +func createProviderWithNestedValueAndCustomDiff(t *testing.T, valueType schema.ValueType, customDiffFunc schema.CustomizeDiffFunc) *schema.Provider { + t.Helper() + return &schema.Provider{ + ResourcesMap: map[string]*schema.Resource{ + "test": { + Schema: map[string]*schema.Schema{ + "nested_value": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: valueType, + Required: true, + }, + }, + }, + Computed: true, + Optional: true, + }, + }, + CustomizeDiff: customDiffFunc, + }, + }, + } +} + +func calculateDiff(t *testing.T, providerConfig *schema.Provider, rawConfigValue cty.Value, stateValue map[string]any) *terraform.InstanceDiff { + t.Helper() + diff, err := providerConfig.ResourcesMap["test"].Diff( + 
context.Background(), + &terraform.InstanceState{ + RawConfig: rawConfigValue, + }, + &terraform.ResourceConfig{ + Config: stateValue, + }, + &provider.Context{Client: acc.Client(t)}, + ) + require.NoError(t, err) + return diff +} diff --git a/pkg/resources/database.go b/pkg/resources/database.go index 2738d9f712..d6fd261400 100644 --- a/pkg/resources/database.go +++ b/pkg/resources/database.go @@ -62,6 +62,32 @@ var databaseSchema = map[string]*schema.Schema{ ForceNew: true, ConflictsWith: []string{"from_share", "from_database"}, }, + // TODO: Add accounts for replication (it will promote local database to serve as a primary database for replication). + // "accounts for replication": { + // Type: schema.TypeList, + // Required: true, + // MinItems: 1, + // Elem: &schema.Schema{ + // Type: schema.TypeString, + // // TODO(ticket-number): Validate account identifiers. + // }, + // // TODO: Desc + // }, + // "accounts for failover": { + // Type: schema.TypeList, + // Required: true, + // MinItems: 1, + // Elem: &schema.Schema{ + // Type: schema.TypeString, + // // TODO(ticket-number): Validate account identifiers. + // }, + // // TODO: Desc + // }, + // "ignore_edition_check": { + // Type: schema.TypeBool, + // // TODO: Desc + // Optional: true, + // }, "replication_configuration": { Type: schema.TypeList, Description: "When set, specifies the configurations for database replication.", diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index d9b30ad651..9400cdb5aa 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -130,6 +130,38 @@ func GetPropertyAsPointer[T any](d *schema.ResourceData, property string) *T { return &typedValue } +// GetPropertyOfFirstNestedObjectByKey should be used for single objects defined in the Terraform schema as +// schema.TypeList with MaxItems set to one and inner schema with single value. 
To easily retrieve +// the inner value, you can specify the top-level property with propertyKey and the nested value with nestedValueKey. +func GetPropertyOfFirstNestedObjectByKey[T any](d *schema.ResourceData, propertyKey string, nestedValueKey string) (*T, error) { + value, ok := d.GetOk(propertyKey) + if !ok { + return nil, fmt.Errorf("nested property %s not found", propertyKey) + } + + typedValue, ok := value.([]any) + if !ok || len(typedValue) != 1 { + return nil, fmt.Errorf("nested property %s is not an array or has incorrect number of values: %d, expected: 1", propertyKey, len(typedValue)) + } + + typedNestedMap, ok := typedValue[0].(map[string]any) + if !ok { + return nil, fmt.Errorf("nested property %s is not of type map[string]any, got: %T", propertyKey, typedValue[0]) + } + + _, ok = typedNestedMap[nestedValueKey] + if !ok { + return nil, fmt.Errorf("nested value key %s couldn't be found in the nested property map %s", nestedValueKey, propertyKey) + } + + typedNestedValue, ok := typedNestedMap[nestedValueKey].(T) + if !ok { + return nil, fmt.Errorf("nested property %s.%s is not of type %T, got: %T", propertyKey, nestedValueKey, *new(T), typedNestedMap[nestedValueKey]) + } + + return &typedNestedValue, nil +} + type tags []tag func (t tags) toSnowflakeTagValues() []snowflake.TagValue { @@ -220,3 +252,21 @@ func getTags(from interface{}) (to tags) { } return to } + +func nestedProperty(innerType schema.ValueType, fieldDescription string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: innerType, + Required: true, + }, + }, + }, + Computed: true, + Optional: true, + Description: fieldDescription, + } +} diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go index d3a1c611e6..cc9534ae7f 100644 --- a/pkg/resources/helpers_test.go +++ b/pkg/resources/helpers_test.go @@ -98,3 +98,133 @@ func 
queriedPrivilegesContainAtLeast(query func(client *sdk.Client, ctx context. return nil } } + +func TestGetFirstNestedObjectByKey(t *testing.T) { + d := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + "int_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeInt, + }, + }, + }, + }, + "string_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeString, + }, + }, + }, + }, + "list_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "multiple_list_properties": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "list": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "empty list": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "not_property": { + Type: schema.TypeString, + }, + }, map[string]any{ + "int_property": []any{ + map[string]any{ + "value": 123, + }, + }, + "string_property": []any{ + map[string]any{ + "value": "some string", + }, + }, + "list": []any{"one"}, + "empty_list": []any{}, + "list_property": []any{ + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + }, + "multiple_list_properties": []any{ + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + }, + "not_property": "not a property", + }) + + intValue, err := resources.GetPropertyOfFirstNestedObjectByKey[int](d, "int_property", "value") + assert.NoError(t, err) + assert.Equal(t, 123, 
*intValue) + + stringValue, err := resources.GetPropertyOfFirstNestedObjectByKey[string](d, "string_property", "value") + assert.NoError(t, err) + assert.Equal(t, "some string", *stringValue) + + listValue, err := resources.GetPropertyOfFirstNestedObjectByKey[[]any](d, "list_property", "value") + assert.NoError(t, err) + assert.Equal(t, []any{"one", "two", "three"}, *listValue) + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "non_existing_property_key", "non_existing_value_key") + assert.ErrorContains(t, err, "nested property non_existing_property_key not found") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "not_property", "value") + assert.ErrorContains(t, err, "nested property not_property is not an array or has incorrect number of values: 0, expected: 1") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "empty_list", "value") + assert.ErrorContains(t, err, "nested property empty_list not found") // Empty list is a default value, so it's treated as "not set" + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "multiple_list_properties", "value") + assert.ErrorContains(t, err, "nested property multiple_list_properties is not an array or has incorrect number of values: 2, expected: 1") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "list", "value") + assert.ErrorContains(t, err, "nested property list is not of type map[string]any, got: string") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "int_property", "non_existing_value_key") + assert.ErrorContains(t, err, "nested value key non_existing_value_key couldn't be found in the nested property map int_property") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[int](d, "string_property", "value") + assert.ErrorContains(t, err, "nested property string_property.value is not of type int, got: string") +} diff --git a/pkg/resources/secondary_database.go b/pkg/resources/secondary_database.go new file 
mode 100644 index 0000000000..3f1b64e7a1 --- /dev/null +++ b/pkg/resources/secondary_database.go @@ -0,0 +1,444 @@ +package resources + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var secondaryDatabaseSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database.", + }, + "as_replica_of": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `\"\".\"\".\"\"`.", + }, + "is_transient": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "Specifies the database as transient. 
Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", + }, + "data_retention_time_in_days": nestedProperty( + schema.TypeInt, + "Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel).", + ), + "max_data_extension_time_in_days": nestedProperty( + schema.TypeInt, + "Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days).", + ), + // TODO: Below parameters should be nested properties + "external_volume": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + }, + "catalog": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + }, + "replace_invalid_characters": { + Type: schema.TypeBool, + Optional: true, + Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. 
You can only set this parameter for tables that use an external Iceberg catalog.", + }, + "default_ddl_collation": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", + }, + "storage_serialization_policy": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), + Description: fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "" + }, + }, + "log_level": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. 
For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), + }, + "trace_level": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the database.", + }, +} + +func SecondaryDatabase() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateSecondaryDatabase, + UpdateContext: UpdateSecondaryDatabase, + ReadContext: ReadSecondaryDatabase, + DeleteContext: DeleteSecondaryDatabase, + Description: "A secondary database creates a replica of an existing primary database (i.e. a secondary database). 
For more information about database replication, see [Introduction to database replication across multiple accounts](https://docs.snowflake.com/en/user-guide/db-replication-intro).", + + CustomizeDiff: customdiff.All( + NestedIntValueAccountObjectComputedIf("data_retention_time_in_days", sdk.AccountParameterDataRetentionTimeInDays), + NestedIntValueAccountObjectComputedIf("max_data_extension_time_in_days", sdk.AccountParameterMaxDataExtensionTimeInDays), + ), + + Schema: secondaryDatabaseSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + secondaryDatabaseId := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + primaryDatabaseId := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(d.Get("as_replica_of").(string)) + + dataRetentionTimeInDays, _ := GetPropertyOfFirstNestedObjectByKey[int](d, "data_retention_time_in_days", "value") + maxDataExtensionTimeInDays, _ := GetPropertyOfFirstNestedObjectByKey[int](d, "max_data_extension_time_in_days", "value") + + var externalVolume *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("external_volume"); ok { + externalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var catalog *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("catalog"); ok { + catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var storageSerializationPolicy *sdk.StorageSerializationPolicy + if v, ok := d.GetOk("storage_serialization_policy"); ok { + storageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(v.(string))) + } + + var logLevel *sdk.LogLevel + if v, ok := d.GetOk("log_level"); ok { + logLevel = sdk.Pointer(sdk.LogLevel(v.(string))) + } + + var traceLevel *sdk.TraceLevel + if v, ok := d.GetOk("trace_level"); ok { + traceLevel = sdk.Pointer(sdk.TraceLevel(v.(string))) 
+ } + + err := client.Databases.CreateSecondary(ctx, secondaryDatabaseId, primaryDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ + Transient: GetPropertyAsPointer[bool](d, "is_transient"), + DataRetentionTimeInDays: dataRetentionTimeInDays, + MaxDataExtensionTimeInDays: maxDataExtensionTimeInDays, + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: GetPropertyAsPointer[bool](d, "replace_invalid_characters"), + DefaultDDLCollation: GetPropertyAsPointer[string](d, "default_ddl_collation"), + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + Comment: GetPropertyAsPointer[string](d, "comment"), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeSnowflakeID(secondaryDatabaseId)) + + return ReadSecondaryDatabase(ctx, d, meta) +} + +func UpdateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + secondaryDatabaseId := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + if d.HasChange("name") { + newId := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + NewName: &newId, + }) + if err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeSnowflakeID(newId)) + secondaryDatabaseId = newId + } + + var databaseSetRequest sdk.DatabaseSet + var databaseUnsetRequest sdk.DatabaseUnset + + if d.HasChange("data_retention_time_in_days") { + dataRetentionObject, ok := d.GetOk("data_retention_time_in_days") + if ok && len(dataRetentionObject.([]any)) > 0 { + dataRetentionTimeInDays, err := GetPropertyOfFirstNestedObjectByKey[int](d, "data_retention_time_in_days", "value") + if err != nil { + return diag.FromErr(err) + } + databaseSetRequest.DataRetentionTimeInDays = dataRetentionTimeInDays + } else { + databaseUnsetRequest.DataRetentionTimeInDays = sdk.Bool(true) + } + } + 
+ if d.HasChange("max_data_extension_time_in_days") { + maxDataExtensionTimeInDaysObject, ok := d.GetOk("max_data_extension_time_in_days") + if ok && len(maxDataExtensionTimeInDaysObject.([]any)) > 0 { + maxDataExtensionTimeInDays, err := GetPropertyOfFirstNestedObjectByKey[int](d, "max_data_extension_time_in_days", "value") + if err != nil { + return diag.FromErr(err) + } + databaseSetRequest.MaxDataExtensionTimeInDays = maxDataExtensionTimeInDays + } else { + databaseUnsetRequest.MaxDataExtensionTimeInDays = sdk.Bool(true) + } + } + + if d.HasChange("external_volume") { + externalVolume := d.Get("external_volume").(string) + if len(externalVolume) > 0 { + databaseSetRequest.ExternalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(externalVolume)) + } else { + databaseUnsetRequest.ExternalVolume = sdk.Bool(true) + } + } + + if d.HasChange("catalog") { + catalog := d.Get("catalog").(string) + if len(catalog) > 0 { + databaseSetRequest.Catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(catalog)) + } else { + databaseUnsetRequest.Catalog = sdk.Bool(true) + } + } + + if d.HasChange("replace_invalid_characters") { + if d.Get("replace_invalid_characters").(bool) { + databaseSetRequest.ReplaceInvalidCharacters = sdk.Bool(true) + } else { + databaseUnsetRequest.ReplaceInvalidCharacters = sdk.Bool(true) + } + } + + if d.HasChange("default_ddl_collation") { + defaultDdlCollation := d.Get("default_ddl_collation").(string) + if len(defaultDdlCollation) > 0 { + databaseSetRequest.DefaultDDLCollation = &defaultDdlCollation + } else { + databaseUnsetRequest.DefaultDDLCollation = sdk.Bool(true) + } + } + + if d.HasChange("storage_serialization_policy") { + storageSerializationPolicy := d.Get("storage_serialization_policy").(string) + if len(storageSerializationPolicy) > 0 { + databaseSetRequest.StorageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(storageSerializationPolicy)) + } else { + databaseUnsetRequest.StorageSerializationPolicy = sdk.Bool(true) + } 
+ } + + if d.HasChange("log_level") { + logLevel := d.Get("log_level").(string) + if len(logLevel) > 0 { + databaseSetRequest.LogLevel = sdk.Pointer(sdk.LogLevel(logLevel)) + } else { + databaseUnsetRequest.LogLevel = sdk.Bool(true) + } + } + + if d.HasChange("trace_level") { + traceLevel := d.Get("trace_level").(string) + if len(traceLevel) > 0 { + databaseSetRequest.TraceLevel = sdk.Pointer(sdk.TraceLevel(traceLevel)) + } else { + databaseUnsetRequest.TraceLevel = sdk.Bool(true) + } + } + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + databaseSetRequest.Comment = &comment + } else { + databaseUnsetRequest.Comment = sdk.Bool(true) + } + } + + if (databaseSetRequest != sdk.DatabaseSet{}) { + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + Set: &databaseSetRequest, + }) + if err != nil { + return diag.FromErr(err) + } + } + + if (databaseUnsetRequest != sdk.DatabaseUnset{}) { + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + Unset: &databaseUnsetRequest, + }) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadSecondaryDatabase(ctx, d, meta) +} + +func ReadSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + secondaryDatabaseId := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + secondaryDatabase, err := client.Databases.ShowByID(ctx, secondaryDatabaseId) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query secondary database. 
Marking the resource as removed.", + Detail: fmt.Sprintf("DatabaseName: %s, Err: %s", secondaryDatabaseId.FullyQualifiedName(), err), + }, + } + } + return diag.FromErr(err) + } + + secondaryDatabaseParameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: secondaryDatabaseId, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + replicationDatabases, err := client.ReplicationFunctions.ShowReplicationDatabases(ctx, &sdk.ShowReplicationDatabasesOptions{ + Like: &sdk.Like{ + Pattern: sdk.String(secondaryDatabaseId.Name()), + }, + }) + if err != nil { + return diag.FromErr(err) + } + + var replicationPrimaryDatabase *sdk.ReplicationDatabase + for _, replicationDatabase := range replicationDatabases { + replicationDatabase := replicationDatabase + if !replicationDatabase.IsPrimary && + replicationDatabase.AccountLocator == client.GetAccountLocator() && + replicationDatabase.Name == secondaryDatabaseId.Name() { + replicationPrimaryDatabase = &replicationDatabase + } + } + if replicationPrimaryDatabase == nil { + return diag.FromErr(fmt.Errorf("could not find replication database for %s", secondaryDatabaseId.Name())) + } + + if err := d.Set("name", secondaryDatabase.Name); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("as_replica_of", sdk.NewExternalObjectIdentifierFromFullyQualifiedName(replicationPrimaryDatabase.PrimaryDatabase).FullyQualifiedName()); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("is_transient", secondaryDatabase.Transient); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("data_retention_time_in_days", []any{map[string]any{"value": secondaryDatabase.RetentionTime}}); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("comment", secondaryDatabase.Comment); err != nil { + return diag.FromErr(err) + } + + for _, secondaryDatabaseParameter := range secondaryDatabaseParameters { + switch secondaryDatabaseParameter.Key { + 
case "MAX_DATA_EXTENSION_TIME_IN_DAYS": + maxDataExtensionTimeInDays, err := strconv.Atoi(secondaryDatabaseParameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := d.Set("max_data_extension_time_in_days", []any{map[string]any{"value": maxDataExtensionTimeInDays}}); err != nil { + return diag.FromErr(err) + } + case "EXTERNAL_VOLUME": + if err := d.Set("external_volume", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "CATALOG": + if err := d.Set("catalog", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "DEFAULT_DDL_COLLATION": + if err := d.Set("default_ddl_collation", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "LOG_LEVEL": + if err := d.Set("log_level", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "TRACE_LEVEL": + if err := d.Set("trace_level", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "REPLACE_INVALID_CHARACTERS": + boolValue, err := strconv.ParseBool(secondaryDatabaseParameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := d.Set("replace_invalid_characters", boolValue); err != nil { + return diag.FromErr(err) + } + case "STORAGE_SERIALIZATION_POLICY": + if err := d.Set("storage_serialization_policy", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + } + } + + return nil +} + +func DeleteSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + err := client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ + IfExists: sdk.Bool(true), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/secondary_database_acceptance_test.go b/pkg/resources/secondary_database_acceptance_test.go new 
file mode 100644 index 0000000000..cf58d60348 --- /dev/null +++ b/pkg/resources/secondary_database_acceptance_test.go @@ -0,0 +1,404 @@ +package resources_test + +import ( + "context" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/require" +) + +func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + _, externalPrimaryId, primaryDatabaseCleanup := acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, []sdk.AccountIdentifier{ + acc.TestClient().Account.GetAccountIdentifier(t), + }) + t.Cleanup(primaryDatabaseCleanup) + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + accountMaxDataExtensionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) + require.NoError(t, err) + + configVariables := func(id sdk.AccountObjectIdentifier, primaryDatabaseName sdk.ExternalObjectIdentifier, comment string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "as_replica_of": config.StringVariable(primaryDatabaseName.FullyQualifiedName()), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + 
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(id, externalPrimaryId, comment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + // Rename + comment update + { + ConfigVariables: configVariables(newId, externalPrimaryId, newComment), + ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", newId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", newComment), + ), + }, + // Import all values + { + ConfigVariables: configVariables(newId, externalPrimaryId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + ResourceName: "snowflake_secondary_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSecondaryDatabase_complete(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + _, externalPrimaryId, primaryDatabaseCleanup := acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, 
[]sdk.AccountIdentifier{ + sdk.NewAccountIdentifierFromAccountLocator(acc.Client(t).GetAccountLocator()), + }) + t.Cleanup(primaryDatabaseCleanup) + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + newExternalVolumeId, newExternalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(newExternalVolumeCleanup) + + newCatalogId, newCatalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(newCatalogCleanup) + + accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + accountMaxDataExtensionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) + require.NoError(t, err) + + configVariables := func( + id sdk.AccountObjectIdentifier, + primaryDatabaseName sdk.ExternalObjectIdentifier, + transient bool, + dataRetentionTimeInDays *int, + maxDataExtensionTimeInDays *int, + externalVolume string, + catalog string, + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy sdk.StorageSerializationPolicy, + logLevel sdk.LogLevel, + traceLevel sdk.TraceLevel, + comment string, + ) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(id.Name()), + "as_replica_of": config.StringVariable(primaryDatabaseName.FullyQualifiedName()), + "transient": config.BoolVariable(transient), + "external_volume": config.StringVariable(externalVolume), + "catalog": config.StringVariable(catalog), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": 
config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), + "log_level": config.StringVariable(string(logLevel)), + "trace_level": config.StringVariable(string(traceLevel)), + "comment": config.StringVariable(comment), + } + if dataRetentionTimeInDays != nil { + variables["data_retention_time_in_days"] = config.IntegerVariable(*dataRetentionTimeInDays) + } + if maxDataExtensionTimeInDays != nil { + variables["max_data_extension_time_in_days"] = config.IntegerVariable(*maxDataExtensionTimeInDays) + } + return variables + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", "5"), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + { + ConfigVariables: configVariables( + newId, + externalPrimaryId, + false, + nil, + nil, + newExternalVolumeId.Name(), + newCatalogId.Name(), + false, + "en_GB", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelDebug, + sdk.TraceLevelAlways, + newComment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", newId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", newExternalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", newCatalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_GB"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelDebug)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelAlways)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", newComment), + ), + }, + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyCompatible, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", "5"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", externalVolumeId.Name()), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + // Import all values + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyCompatible, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + ResourceName: "snowflake_secondary_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSecondaryDatabase_DataRetentionTimeInDays(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + _, externalPrimaryId, primaryDatabaseCleanup := acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, []sdk.AccountIdentifier{ + sdk.NewAccountIdentifierFromAccountLocator(acc.Client(t).GetAccountLocator()), + }) + t.Cleanup(primaryDatabaseCleanup) + + accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + configVariables := func( + id sdk.AccountObjectIdentifier, + primaryDatabaseName 
sdk.ExternalObjectIdentifier, + dataRetentionTimeInDays *int, + ) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(id.Name()), + "as_replica_of": config.StringVariable(primaryDatabaseName.FullyQualifiedName()), + "transient": config.BoolVariable(false), + "external_volume": config.StringVariable(""), + "catalog": config.StringVariable(""), + "replace_invalid_characters": config.StringVariable("false"), + "default_ddl_collation": config.StringVariable(""), + "storage_serialization_policy": config.StringVariable("OPTIMIZED"), + "log_level": config.StringVariable("OFF"), + "trace_level": config.StringVariable("OFF"), + "comment": config.StringVariable(""), + } + if dataRetentionTimeInDays != nil { + variables["data_retention_time_in_days"] = config.IntegerVariable(*dataRetentionTimeInDays) + variables["max_data_extension_time_in_days"] = config.IntegerVariable(10) + } + return variables + } + + var revertAccountParameterChange func() + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(2)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(1)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", 
"data_retention_time_in_days.0.value", "1"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + { + PreConfig: func() { + revertAccountParameterChange = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "3") + t.Cleanup(revertAccountParameterChange) + }, + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "3"), + ), + }, + { + PreConfig: func() { + revertAccountParameterChange() + }, + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(3)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "3"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + }, + }) +} diff --git a/pkg/resources/shared_database.go b/pkg/resources/shared_database.go new file mode 100644 index 0000000000..e6075a632b --- /dev/null +++ b/pkg/resources/shared_database.go @@ -0,0 +1,305 @@ +package resources + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var sharedDatabaseSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the identifier for the database; must be unique for your account.", + }, + "from_share": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `\"<organization_name>\".\"<account_name>\".\"<share_name>\"`.", + }, + // TODO(SNOW-1325381): Add it as an item to discuss and either remove or uncomment (and implement) it + // "is_transient": { + // Type: schema.TypeBool, + // Optional: true, + // ForceNew: true, + // Description: "Specifies the database as transient. 
Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", + // }, + "external_volume": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + }, + "catalog": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + }, + "replace_invalid_characters": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", + }, + "default_ddl_collation": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", + }, + "storage_serialization_policy": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), + Description: fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "" + }, + }, + "log_level": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), + }, + "trace_level": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the database.", + }, +} + +func SharedDatabase() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateSharedDatabase, + UpdateContext: UpdateSharedDatabase, + ReadContext: ReadSharedDatabase, + DeleteContext: DeleteSharedDatabase, + Description: "A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro).", + + Schema: sharedDatabaseSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + externalShareId := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(d.Get("from_share").(string)) + + var externalVolume *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("external_volume"); ok { + externalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var catalog *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("catalog"); ok { + catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var storageSerializationPolicy *sdk.StorageSerializationPolicy + if v, ok := d.GetOk("storage_serialization_policy"); ok { + storageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(v.(string))) + } + + var logLevel *sdk.LogLevel + if v, ok := d.GetOk("log_level"); ok { + logLevel = sdk.Pointer(sdk.LogLevel(v.(string))) + } + + var traceLevel *sdk.TraceLevel + if v, ok := d.GetOk("trace_level"); ok { + traceLevel = 
sdk.Pointer(sdk.TraceLevel(v.(string))) + } + + err := client.Databases.CreateShared(ctx, id, externalShareId, &sdk.CreateSharedDatabaseOptions{ + // TODO(SNOW-1325381) + // Transient: GetPropertyAsPointer[bool](d, "is_transient"), + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: GetPropertyAsPointer[bool](d, "replace_invalid_characters"), + DefaultDDLCollation: GetPropertyAsPointer[string](d, "default_ddl_collation"), + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + Comment: GetPropertyAsPointer[string](d, "comment"), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeSnowflakeID(id)) + + return ReadSharedDatabase(ctx, d, meta) +} + +func UpdateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + if d.HasChange("name") { + newName := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + NewName: &newName, + }) + if err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeSnowflakeID(newName)) + id = newName + } + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + Set: &sdk.DatabaseSet{ + Comment: &comment, + }, + }) + if err != nil { + return diag.FromErr(err) + } + } else { + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + Unset: &sdk.DatabaseUnset{ + Comment: sdk.Bool(true), + }, + }) + if err != nil { + return diag.FromErr(err) + } + } + } + + return ReadSharedDatabase(ctx, d, meta) +} + +func ReadSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := 
helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + database, err := client.Databases.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query shared database. Marking the resource as removed.", + Detail: fmt.Sprintf("DatabaseName: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return diag.FromErr(err) + } + + parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: id, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + if err := d.Set("name", database.Name); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("from_share", sdk.NewExternalObjectIdentifierFromFullyQualifiedName(database.Origin).FullyQualifiedName()); err != nil { + return diag.FromErr(err) + } + + // TODO(SNOW-1325381) + // if err := d.Set("is_transient", database.Transient); err != nil { + // return diag.FromErr(err) + //} + + if err := d.Set("comment", database.Comment); err != nil { + return diag.FromErr(err) + } + + for _, parameter := range parameters { + switch parameter.Key { + case "EXTERNAL_VOLUME": + if err := d.Set("external_volume", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "CATALOG": + if err := d.Set("catalog", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "DEFAULT_DDL_COLLATION": + if err := d.Set("default_ddl_collation", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "LOG_LEVEL": + if err := d.Set("log_level", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "TRACE_LEVEL": + if err := d.Set("trace_level", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "REPLACE_INVALID_CHARACTERS": + boolValue, err := strconv.ParseBool(parameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := 
d.Set("replace_invalid_characters", boolValue); err != nil { + return diag.FromErr(err) + } + case "STORAGE_SERIALIZATION_POLICY": + if err := d.Set("storage_serialization_policy", parameter.Value); err != nil { + return diag.FromErr(err) + } + } + } + + return nil +} + +func DeleteSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + err := client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ + IfExists: sdk.Bool(true), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/shared_database_acceptance_test.go b/pkg/resources/shared_database_acceptance_test.go new file mode 100644 index 0000000000..6c90d5560c --- /dev/null +++ b/pkg/resources/shared_database_acceptance_test.go @@ -0,0 +1,267 @@ +package resources_test + +import ( + "context" + "regexp" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/require" +) + +func TestAcc_CreateSharedDatabase_minimal(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + configVariables := func(id sdk.AccountObjectIdentifier, shareName sdk.ExternalObjectIdentifier, comment string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + 
"from_share": config.StringVariable(shareName.FullyQualifiedName()), + "comment": config.StringVariable(comment), + } + } + + shareExternalId := createShareableDatabase(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(id, shareExternalId, comment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + ), + }, + { + ConfigVariables: configVariables(newId, shareExternalId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", newId.Name()), + 
resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", newComment), + ), + }, + // Import all values + { + ConfigVariables: configVariables(newId, shareExternalId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + ResourceName: "snowflake_shared_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSharedDatabase_complete(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + externalShareId := createShareableDatabase(t) + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + configVariables := func( + id sdk.AccountObjectIdentifier, + shareName sdk.ExternalObjectIdentifier, + externalVolume sdk.AccountObjectIdentifier, + catalog sdk.AccountObjectIdentifier, + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy sdk.StorageSerializationPolicy, + logLevel sdk.LogLevel, + traceLevel sdk.TraceLevel, + comment string, 
+ ) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "from_share": config.StringVariable(shareName.FullyQualifiedName()), + "external_volume": config.StringVariable(externalVolume.Name()), + "catalog": config.StringVariable(catalog.Name()), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), + "log_level": config.StringVariable(string(logLevel)), + "trace_level": config.StringVariable(string(traceLevel)), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + id, + externalShareId, + externalVolumeId, + catalogId, + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", 
"storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + ), + }, + // Import all values + { + ConfigVariables: configVariables( + id, + externalShareId, + externalVolumeId, + catalogId, + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + ResourceName: "snowflake_shared_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSharedDatabase_InvalidValues(t *testing.T) { + comment := random.Comment() + + configVariables := func( + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy string, + logLevel string, + traceLevel string, + comment string, + ) config.Variables { + return config.Variables{ + "name": config.StringVariable(""), + "from_share": config.StringVariable(""), + "external_volume": config.StringVariable(""), + "catalog": config.StringVariable(""), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(storageSerializationPolicy), + "log_level": config.StringVariable(logLevel), + "trace_level": config.StringVariable(traceLevel), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) 
}, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + true, + "en_US", + "invalid_value", + "invalid_value", + "invalid_value", + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + ExpectError: regexp.MustCompile(`(expected \[{{} log_level}\] to be one of \[\"TRACE\" \"DEBUG\" \"INFO\" \"WARN\" \"ERROR\" \"FATAL\" \"OFF\"\], got invalid_value)|` + + `(expected \[{{} trace_level}\] to be one of \[\"ALWAYS\" \"ON_EVENT\" \"OFF\"\], got invalid_value)|` + + `(expected \[{{} storage_serialization_policy}\] to be one of \[\"COMPATIBLE\" \"OPTIMIZED\"\], got invalid_value)`), + }, + }, + }) +} + +// createShareableDatabase creates a database on the secondary account and enables database sharing on the primary account. +// TODO(SNOW-1431726): Later on, this function should be moved to more sophisticated helpers. +func createShareableDatabase(t *testing.T) sdk.ExternalObjectIdentifier { + t.Helper() + + ctx := context.Background() + + share, shareCleanup := acc.SecondaryTestClient().Share.CreateShare(t) + t.Cleanup(shareCleanup) + + sharedDatabase, sharedDatabaseCleanup := acc.SecondaryTestClient().Database.CreateDatabase(t) + t.Cleanup(sharedDatabaseCleanup) + + err := acc.SecondaryClient(t).Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, share.ID()) + require.NoError(t, err) + t.Cleanup(func() { + err := acc.SecondaryClient(t).Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, share.ID()) + require.NoError(t, err) + }) + + err = acc.SecondaryClient(t).Shares.Alter(ctx, share.ID(), &sdk.AlterShareOptions{ + IfExists: sdk.Bool(true), + Set: &sdk.ShareSet{ + Accounts: []sdk.AccountIdentifier{ + acc.TestClient().Account.GetAccountIdentifier(t), + }, + }, + }) + 
require.NoError(t, err) + + return sdk.NewExternalObjectIdentifier(acc.SecondaryTestClient().Account.GetAccountIdentifier(t), share.ID()) +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf new file mode 100644 index 0000000000..4fb82e93a2 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf new file mode 100644 index 0000000000..2fce70aa2f --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf @@ -0,0 +1,12 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "comment" { + type = string +} + diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf new file mode 100644 index 0000000000..4cefd5d621 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf @@ -0,0 +1,22 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + is_transient = var.transient + + data_retention_time_in_days { + value = var.data_retention_time_in_days + } + + max_data_extension_time_in_days { + value = var.max_data_extension_time_in_days + } + + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git 
a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf new file mode 100644 index 0000000000..cfe7514845 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf @@ -0,0 +1,51 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "transient" { + type = bool +} + +variable "data_retention_time_in_days" { + type = string +} + +variable "max_data_extension_time_in_days" { + type = string +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = string +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf new file mode 100644 index 0000000000..5aa60d21ed --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf @@ -0,0 +1,13 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + is_transient = var.transient + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf 
b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf new file mode 100644 index 0000000000..977a6bdfe1 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf @@ -0,0 +1,43 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "transient" { + type = bool +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = string +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf new file mode 100644 index 0000000000..31b366401b --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_shared_database" "test" { + name = var.name + from_share = var.from_share + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf new file mode 100644 index 0000000000..dc80047760 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf @@ -0,0 +1,12 @@ +variable "name" { + type = string +} + +variable "from_share" { + type = string +} + +variable "comment" { + type = string +} + diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf new file mode 100644 index 0000000000..5c2f7493b6 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf @@ -0,0 +1,12 @@ +resource "snowflake_shared_database" "test" { + name = var.name + 
from_share = var.from_share + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf new file mode 100644 index 0000000000..b704eb8dfe --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf @@ -0,0 +1,39 @@ +variable "name" { + type = string +} + +variable "from_share" { + type = string +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = bool +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index aaba933466..e95d15671d 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -234,6 +234,16 @@ const ( LogLevelOff LogLevel = "OFF" ) +var AllLogLevels = []LogLevel{ + LogLevelTrace, + LogLevelDebug, + LogLevelInfo, + LogLevelWarn, + LogLevelError, + LogLevelFatal, + LogLevelOff, +} + type TraceLevel string const ( @@ -241,3 +251,9 @@ const ( TraceLevelOnEvent TraceLevel = "ON_EVENT" TraceLevelOff TraceLevel = "OFF" ) + +var AllTraceLevels = []TraceLevel{ + TraceLevelAlways, + TraceLevelOnEvent, + TraceLevelOff, +} diff --git a/pkg/sdk/context_functions.go b/pkg/sdk/context_functions.go index b21d69d862..e0eb339db8 100644 --- a/pkg/sdk/context_functions.go +++ b/pkg/sdk/context_functions.go @@ -12,6 +12,8 @@ import ( type ContextFunctions 
interface { // Session functions. CurrentAccount(ctx context.Context) (string, error) + CurrentOrganizationName(ctx context.Context) (string, error) + CurrentAccountName(ctx context.Context) (string, error) CurrentRole(ctx context.Context) (AccountObjectIdentifier, error) CurrentSecondaryRoles(ctx context.Context) (*CurrentSecondaryRoles, error) CurrentRegion(ctx context.Context) (string, error) @@ -70,6 +72,28 @@ func (c *contextFunctions) CurrentAccount(ctx context.Context) (string, error) { return s.CurrentAccount, nil } +func (c *contextFunctions) CurrentOrganizationName(ctx context.Context) (string, error) { + s := &struct { + CurrentOrganizationName string `db:"CURRENT_ORGANIZATION_NAME"` + }{} + err := c.client.queryOne(ctx, s, "SELECT CURRENT_ORGANIZATION_NAME() as CURRENT_ORGANIZATION_NAME") + if err != nil { + return "", err + } + return s.CurrentOrganizationName, nil +} + +func (c *contextFunctions) CurrentAccountName(ctx context.Context) (string, error) { + s := &struct { + CurrentAccountName string `db:"CURRENT_ACCOUNT_NAME"` + }{} + err := c.client.queryOne(ctx, s, "SELECT CURRENT_ACCOUNT_NAME() as CURRENT_ACCOUNT_NAME") + if err != nil { + return "", err + } + return s.CurrentAccountName, nil +} + func (c *contextFunctions) CurrentRole(ctx context.Context) (AccountObjectIdentifier, error) { s := &struct { CurrentRole string `db:"CURRENT_ROLE"` diff --git a/pkg/sdk/databases.go b/pkg/sdk/databases.go index b7be7579d1..cdefbc81d8 100644 --- a/pkg/sdk/databases.go +++ b/pkg/sdk/databases.go @@ -137,24 +137,38 @@ func (row databaseRow) convert() *Database { return database } +type StorageSerializationPolicy string + +const ( + StorageSerializationPolicyCompatible StorageSerializationPolicy = "COMPATIBLE" + StorageSerializationPolicyOptimized StorageSerializationPolicy = "OPTIMIZED" +) + +var AllStorageSerializationPolicies = []StorageSerializationPolicy{ + StorageSerializationPolicyCompatible, + StorageSerializationPolicyOptimized, +} + // 
CreateDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. type CreateDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - Clone *Clone `ddl:"-"` - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + Clone *Clone `ddl:"-"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` + MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + 
StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` } func (opts *CreateDatabaseOptions) validate() error { @@ -200,20 +214,22 @@ func (v *databases) Create(ctx context.Context, id AccountObjectIdentifier, opts // CreateSharedDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. type CreateSharedDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - fromShare ExternalObjectIdentifier `ddl:"identifier" sql:"FROM SHARE"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + fromShare ExternalObjectIdentifier `ddl:"identifier" sql:"FROM SHARE"` + ExternalVolume *AccountObjectIdentifier 
`ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` } func (opts *CreateSharedDatabaseOptions) validate() error { @@ -260,21 +276,23 @@ func (v *databases) CreateShared(ctx context.Context, id AccountObjectIdentifier // CreateSecondaryDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. type CreateSecondaryDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - primaryDatabase ExternalObjectIdentifier `ddl:"identifier" sql:"AS REPLICA OF"` - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" 
sql:"COMMENT"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + primaryDatabase ExternalObjectIdentifier `ddl:"identifier" sql:"AS REPLICA OF"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` + MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } func (opts *CreateSecondaryDatabaseOptions) validate() error { @@ -362,14 +380,16 @@ func (opts *AlterDatabaseOptions) validate() error { } type DatabaseSet struct { - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string 
`ddl:"parameter,single_quotes" sql:"COMMENT"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` + MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } func (v *DatabaseSet) validate() error { @@ -380,8 +400,8 @@ func (v *DatabaseSet) validate() error { if v.Catalog != nil && !ValidObjectIdentifier(v.Catalog) { errs = append(errs, errInvalidIdentifier("DatabaseSet", "Catalog")) } - if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.DefaultDDLCollation, v.LogLevel, v.TraceLevel, v.Comment) { - errs = append(errs, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.ReplaceInvalidCharacters, v.DefaultDDLCollation, v.StorageSerializationPolicy, v.LogLevel, v.TraceLevel, v.Comment) { + errs = append(errs, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) } return errors.Join(errs...) 
} @@ -391,7 +411,9 @@ type DatabaseUnset struct { MaxDataExtensionTimeInDays *bool `ddl:"keyword" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` ExternalVolume *bool `ddl:"keyword" sql:"EXTERNAL_VOLUME"` Catalog *bool `ddl:"keyword" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"keyword" sql:"REPLACE_INVALID_CHARACTERS"` DefaultDDLCollation *bool `ddl:"keyword" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *bool `ddl:"keyword" sql:"STORAGE_SERIALIZATION_POLICY"` LogLevel *bool `ddl:"keyword" sql:"LOG_LEVEL"` TraceLevel *bool `ddl:"keyword" sql:"TRACE_LEVEL"` Comment *bool `ddl:"keyword" sql:"COMMENT"` @@ -399,8 +421,8 @@ type DatabaseUnset struct { func (v *DatabaseUnset) validate() error { var errs []error - if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.DefaultDDLCollation, v.LogLevel, v.TraceLevel, v.Comment) { - errs = append(errs, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.ReplaceInvalidCharacters, v.DefaultDDLCollation, v.StorageSerializationPolicy, v.LogLevel, v.TraceLevel, v.Comment) { + errs = append(errs, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) } return errors.Join(errs...) 
} diff --git a/pkg/sdk/databases_test.go b/pkg/sdk/databases_test.go index 579c77556b..c5b337ccc5 100644 --- a/pkg/sdk/databases_test.go +++ b/pkg/sdk/databases_test.go @@ -74,7 +74,9 @@ func TestDatabasesCreate(t *testing.T) { opts.MaxDataExtensionTimeInDays = Int(1) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(true) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyCompatible) opts.LogLevel = Pointer(LogLevelInfo) opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") @@ -84,7 +86,7 @@ func TestDatabasesCreate(t *testing.T) { Value: "v1", }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT DATABASE IF NOT EXISTS %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT DATABASE IF NOT EXISTS %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = true DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = COMPATIBLE LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) } @@ -141,7 +143,9 @@ func TestDatabasesCreateShared(t *testing.T) { opts.OrReplace = Bool(true) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(false) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyOptimized) opts.LogLevel = Pointer(LogLevelInfo) 
opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") @@ -151,7 +155,7 @@ func TestDatabasesCreateShared(t *testing.T) { Value: "v1", }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE DATABASE %s FROM SHARE %s EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), opts.fromShare.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE DATABASE %s FROM SHARE %s EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = false DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = OPTIMIZED LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), opts.fromShare.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) } @@ -210,11 +214,13 @@ func TestDatabasesCreateSecondary(t *testing.T) { opts.MaxDataExtensionTimeInDays = Int(10) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(true) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyOptimized) opts.LogLevel = Pointer(LogLevelInfo) opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TRANSIENT DATABASE %s AS REPLICA OF %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 10 EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment'`, opts.name.FullyQualifiedName(), primaryDatabaseId.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE 
TRANSIENT DATABASE %s AS REPLICA OF %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 10 EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = true DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = OPTIMIZED LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment'`, opts.name.FullyQualifiedName(), primaryDatabaseId.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) } @@ -255,13 +261,13 @@ func TestDatabasesAlter(t *testing.T) { t.Run("validation: at least one set option", func(t *testing.T) { opts := defaultOpts() opts.Set = &DatabaseSet{} - assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) }) t.Run("validation: at least one unset option", func(t *testing.T) { opts := defaultOpts() opts.Unset = &DatabaseUnset{} - assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) }) t.Run("validation: invalid external volume identifier", func(t *testing.T) { @@ -314,12 +320,14 @@ func TestDatabasesAlter(t *testing.T) { MaxDataExtensionTimeInDays: Int(1), ExternalVolume: &externalVolumeId, Catalog: &catalogId, + 
ReplaceInvalidCharacters: Bool(true), DefaultDDLCollation: String("en_US"), + StorageSerializationPolicy: Pointer(StorageSerializationPolicyCompatible), LogLevel: Pointer(LogLevelError), TraceLevel: Pointer(TraceLevelOnEvent), Comment: String("comment"), } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET DATA_RETENTION_TIME_IN_DAYS = 1, MAX_DATA_EXTENSION_TIME_IN_DAYS = 1, EXTERNAL_VOLUME = %s, CATALOG = %s, DEFAULT_DDL_COLLATION = 'en_US', LOG_LEVEL = 'ERROR', TRACE_LEVEL = 'ON_EVENT', COMMENT = 'comment'`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET DATA_RETENTION_TIME_IN_DAYS = 1, MAX_DATA_EXTENSION_TIME_IN_DAYS = 1, EXTERNAL_VOLUME = %s, CATALOG = %s, REPLACE_INVALID_CHARACTERS = true, DEFAULT_DDL_COLLATION = 'en_US', STORAGE_SERIALIZATION_POLICY = COMPATIBLE, LOG_LEVEL = 'ERROR', TRACE_LEVEL = 'ON_EVENT', COMMENT = 'comment'`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) t.Run("unset", func(t *testing.T) { @@ -329,12 +337,14 @@ func TestDatabasesAlter(t *testing.T) { MaxDataExtensionTimeInDays: Bool(true), ExternalVolume: Bool(true), Catalog: Bool(true), + ReplaceInvalidCharacters: Bool(true), DefaultDDLCollation: Bool(true), + StorageSerializationPolicy: Bool(true), LogLevel: Bool(true), TraceLevel: Bool(true), Comment: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, EXTERNAL_VOLUME, CATALOG, DEFAULT_DDL_COLLATION, LOG_LEVEL, TRACE_LEVEL, COMMENT`, opts.name.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, EXTERNAL_VOLUME, CATALOG, REPLACE_INVALID_CHARACTERS, DEFAULT_DDL_COLLATION, STORAGE_SERIALIZATION_POLICY, LOG_LEVEL, TRACE_LEVEL, COMMENT`, opts.name.FullyQualifiedName()) }) 
t.Run("with set tag", func(t *testing.T) { diff --git a/pkg/sdk/identifier_helpers.go b/pkg/sdk/identifier_helpers.go index 056fda4c5b..36a12d6a96 100644 --- a/pkg/sdk/identifier_helpers.go +++ b/pkg/sdk/identifier_helpers.go @@ -123,7 +123,7 @@ func NewAccountIdentifier(organizationName, accountName string) AccountIdentifie func NewAccountIdentifierFromAccountLocator(accountLocator string) AccountIdentifier { return AccountIdentifier{ - accountLocator: accountLocator, + accountLocator: strings.Trim(accountLocator, `"`), } } diff --git a/pkg/sdk/parameters.go b/pkg/sdk/parameters.go index f5dac7eed2..0d44afc777 100644 --- a/pkg/sdk/parameters.go +++ b/pkg/sdk/parameters.go @@ -474,6 +474,8 @@ const ( ObjectParameterUserTaskTimeoutMs ObjectParameter = "USER_TASK_TIMEOUT_MS" ObjectParameterCatalog ObjectParameter = "CATALOG" ObjectParameterExternalVolume ObjectParameter = "EXTERNAL_VOLUME" + ObjectParameterReplaceInvalidCharacters ObjectParameter = "REPLACE_INVALID_CHARACTERS" + ObjectParameterStorageSerializationPolicy ObjectParameter = "STORAGE_SERIALIZATION_POLICY" // User Parameters ObjectParameterEnableUnredactedQuerySyntaxError ObjectParameter = "ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR" diff --git a/pkg/sdk/privileges.go b/pkg/sdk/privileges.go index eb0a3ba97e..db4d4278ef 100644 --- a/pkg/sdk/privileges.go +++ b/pkg/sdk/privileges.go @@ -123,6 +123,7 @@ const ( SchemaPrivilegeAddSearchOptimization SchemaPrivilege = "ADD SEARCH OPTIMIZATION" SchemaPrivilegeApplyBudget SchemaPrivilege = "APPLYBUDGET" SchemaPrivilegeCreateAlert SchemaPrivilege = "CREATE ALERT" + SchemaPrivilegeCreateDataset SchemaPrivilege = "CREATE DATASET" SchemaPrivilegeCreateFileFormat SchemaPrivilege = "CREATE FILE FORMAT" SchemaPrivilegeCreateFunction SchemaPrivilege = "CREATE FUNCTION" SchemaPrivilegeCreateGitRepository SchemaPrivilege = "CREATE GIT REPOSITORY" diff --git a/pkg/sdk/testint/databases_integration_test.go b/pkg/sdk/testint/databases_integration_test.go index 
1e52fce35c..5342c01f72 100644 --- a/pkg/sdk/testint/databases_integration_test.go +++ b/pkg/sdk/testint/databases_integration_test.go @@ -80,7 +80,9 @@ func TestInt_DatabasesCreate(t *testing.T) { MaxDataExtensionTimeInDays: sdk.Int(1), ExternalVolume: &externalVolume, Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyCompatible), LogLevel: sdk.Pointer(sdk.LogLevelInfo), TraceLevel: sdk.Pointer(sdk.TraceLevelOnEvent), Comment: sdk.String(comment), @@ -124,6 +126,14 @@ func TestInt_DatabasesCreate(t *testing.T) { assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelOnEvent), traceLevelParam.Value) + ignoreInvalidCharactersParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseID}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseID}) + assert.NoError(t, err) + assert.Equal(t, string(sdk.StorageSerializationPolicyCompatible), serializationPolicyParam.Value) + tag1Value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) assert.Equal(t, "v1", tag1Value) @@ -139,13 +149,7 @@ func TestInt_DatabasesCreateShared(t *testing.T) { secondaryClient := testSecondaryClient(t) ctx := testContext(t) - databaseTest, databaseCleanup := testClientHelper().Database.CreateDatabase(t) - t.Cleanup(databaseCleanup) - - schemaTest, schemaCleanup := testClientHelper().Schema.CreateSchemaInDatabase(t, databaseTest.ID()) - t.Cleanup(schemaCleanup) - - testTag, testTagCleanup := testClientHelper().Tag.CreateTagInSchema(t, schemaTest.ID()) + testTag, testTagCleanup := 
testClientHelper().Tag.CreateTag(t) t.Cleanup(testTagCleanup) externalVolume, externalVolumeCleanup := testClientHelper().ExternalVolume.Create(t) @@ -161,6 +165,8 @@ func TestInt_DatabasesCreateShared(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ Database: sharedDatabase.ID(), }, shareTest.ID()) @@ -183,15 +189,17 @@ func TestInt_DatabasesCreateShared(t *testing.T) { require.NoError(t, err) comment := random.Comment() - err = client.Databases.CreateShared(ctx, sharedDatabase.ID(), shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{ - Transient: sdk.Bool(true), - IfNotExists: sdk.Bool(true), - ExternalVolume: &externalVolume, - Catalog: &catalog, - DefaultDDLCollation: sdk.String("en_US"), - LogLevel: sdk.Pointer(sdk.LogLevelDebug), - TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), - Comment: sdk.String(comment), + err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{ + Transient: sdk.Bool(true), + IfNotExists: sdk.Bool(true), + ExternalVolume: &externalVolume, + Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), + DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyOptimized), + LogLevel: sdk.Pointer(sdk.LogLevelDebug), + TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), + Comment: sdk.String(comment), Tag: []sdk.TagAssociation{ { Name: testTag.ID(), @@ -200,30 +208,38 @@ func TestInt_DatabasesCreateShared(t *testing.T) { }, }) require.NoError(t, err) - t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID())) + t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, databaseId)) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + 
database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - assert.Equal(t, sharedDatabase.ID().Name(), database.Name) + assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, comment, database.Comment) - externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, externalVolume.Name(), externalVolumeParam.Value) - catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, catalog.Name(), catalogParam.Value) - logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.LogLevelDebug), logLevelParam.Value) - traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelAlways), traceLevelParam.Value) + ignoreInvalidCharactersParam, err := 
client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, string(sdk.StorageSerializationPolicyOptimized), serializationPolicyParam.Value) + tag1Value, err := client.SystemFunctions.GetTag(ctx, testTag.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) assert.Equal(t, "v1", tag1Value) @@ -237,6 +253,8 @@ func TestInt_DatabasesCreateSecondary(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Databases.AlterReplication(ctx, sharedDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ EnableReplication: &sdk.EnableReplication{ ToAccounts: []sdk.AccountIdentifier{ @@ -253,48 +271,59 @@ func TestInt_DatabasesCreateSecondary(t *testing.T) { catalog, catalogCleanup := testClientHelper().CatalogIntegration.Create(t) t.Cleanup(catalogCleanup) - externalDatabaseId := sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Ids.AccountIdentifierWithLocator(), sharedDatabase.ID()) + externalDatabaseId := sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Account.GetAccountIdentifier(t), sharedDatabase.ID()) + comment := random.Comment() - err = client.Databases.CreateSecondary(ctx, sharedDatabase.ID(), externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ + err = client.Databases.CreateSecondary(ctx, databaseId, externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ IfNotExists: sdk.Bool(true), DataRetentionTimeInDays: sdk.Int(1), MaxDataExtensionTimeInDays: sdk.Int(10), 
ExternalVolume: &externalVolume, Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyOptimized), LogLevel: sdk.Pointer(sdk.LogLevelDebug), TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), Comment: sdk.String(comment), }) require.NoError(t, err) - t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID())) + t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, databaseId)) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - assert.Equal(t, sharedDatabase.ID().Name(), database.Name) + assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, 1, database.RetentionTime) assert.Equal(t, comment, database.Comment) - param, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterMaxDataExtensionTimeInDays, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + param, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterMaxDataExtensionTimeInDays, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, "10", param.Value) - externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, externalVolume.Name(), externalVolumeParam.Value) - catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, 
sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, catalog.Name(), catalogParam.Value) - logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.LogLevelDebug), logLevelParam.Value) - traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelAlways), traceLevelParam.Value) + + ignoreInvalidCharactersParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, string(sdk.StorageSerializationPolicyOptimized), serializationPolicyParam.Value) } func TestInt_DatabasesAlter(t *testing.T) { @@ -302,14 +331,19 @@ func TestInt_DatabasesAlter(t *testing.T) { secondaryClient := testSecondaryClient(t) ctx := testContext(t) - queryParameterValueForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter sdk.ObjectParameter) string { + queryParameterForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter 
sdk.ObjectParameter) *sdk.Parameter { t.Helper() value, err := client.Parameters.ShowObjectParameter(ctx, parameter, sdk.Object{ ObjectType: sdk.ObjectTypeDatabase, Name: id, }) require.NoError(t, err) - return value.Value + return value + } + + queryParameterValueForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter sdk.ObjectParameter) string { + t.Helper() + return queryParameterForDatabase(t, id, parameter).Value } testCases := []struct { @@ -334,6 +368,8 @@ func TestInt_DatabasesAlter(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ Database: sharedDatabase.ID(), }, shareTest.ID()) @@ -355,13 +391,13 @@ func TestInt_DatabasesAlter(t *testing.T) { }) require.NoError(t, err) - err = client.Databases.CreateShared(ctx, sharedDatabase.ID(), shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) + err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) require.NoError(t, err) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - return database, testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID()) + return database, testClientHelper().Database.DropDatabaseFunc(t, database.ID()) }, }, { @@ -442,6 +478,40 @@ func TestInt_DatabasesAlter(t *testing.T) { require.Equal(t, string(sdk.TraceLevelOff), queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterTraceLevel)) }) + t.Run(fmt.Sprintf("Database: %s - setting and unsetting replace_invalid_characters and storage_serialization_policy", testCase.DatabaseType), func(t *testing.T) { + if testCase.DatabaseType == "From Share" { + t.Skipf("Skipping 
database test because from share is not supported") + } + + databaseTest, databaseTestCleanup := testCase.CreateFn(t) + t.Cleanup(databaseTestCleanup) + + err := client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ + Set: &sdk.DatabaseSet{ + ReplaceInvalidCharacters: sdk.Bool(true), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyCompatible), + }, + }) + require.NoError(t, err) + + require.Equal(t, "true", queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterReplaceInvalidCharacters)) + require.Equal(t, string(sdk.StorageSerializationPolicyCompatible), queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterStorageSerializationPolicy)) + + err = client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ + Unset: &sdk.DatabaseUnset{ + ReplaceInvalidCharacters: sdk.Bool(true), + StorageSerializationPolicy: sdk.Bool(true), + }, + }) + require.NoError(t, err) + + replaceInvalidCharactersParam := queryParameterForDatabase(t, databaseTest.ID(), sdk.ObjectParameterReplaceInvalidCharacters) + storageSerializationPolicyParam := queryParameterForDatabase(t, databaseTest.ID(), sdk.ObjectParameterStorageSerializationPolicy) + + require.Equal(t, replaceInvalidCharactersParam.Default, replaceInvalidCharactersParam.Value) + require.Equal(t, storageSerializationPolicyParam.Default, storageSerializationPolicyParam.Value) + }) + t.Run(fmt.Sprintf("Database: %s - setting and unsetting external volume and catalog", testCase.DatabaseType), func(t *testing.T) { if testCase.DatabaseType == "From Share" { t.Skipf("Skipping database test because from share is not supported") diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD index c40eabcb0d..b7b009439e 100644 --- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD +++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD @@ -13,12 +13,12 @@ newer provider versions. 
We will address these while working on the given object | Object Type | Status | Known issues | |--------------------------|:------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-| | ACCOUNT | ❌ | [#2030](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2030), [#2015](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2015), [#1891](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1891), [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679), [#1671](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1671), [#1501](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1501), [#1062](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1062) | -| DATABASE | ❌ | [#2590](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2590), [#2321](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2321), [#2277](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2277), [#1833](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1833), [#1770](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1770), [#1453](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1453), [#1371](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1371), [#1367](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1367), [#1045](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1045), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | +| DATABASE | 👨‍💻 | [#2590](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2590), [#2321](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2321), [#2277](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2277), [#1833](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1833), [#1770](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1770), 
[#1453](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1453), [#1371](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1371), [#1367](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1367), [#1045](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1045), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | | DATABASE ROLE | ❌ | - | | NETWORK POLICY | ❌ | - | | RESOURCE MONITOR | ❌ | [#1990](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1990), [#1832](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1832), [#1821](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1821), [#1754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1754), [#1716](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1716), [#1714](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1714), [#1624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1624), [#1500](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1500), [#1175](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1175) | | ROLE | ❌ | - | -| SECURITY INTEGRATION | ❌ | [#2719](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2719), [#2568](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2568), [#2177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2177), [#1851](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1851), [#1773](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1773), [#1741](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1741), [#1637](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1637), [#1503](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1503), 
[#1498](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1498), [#1421](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1421), [#1224](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1224) | +| SECURITY INTEGRATION | 👨‍💻 | [#2719](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2719), [#2568](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2568), [#2177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2177), [#1851](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1851), [#1773](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1773), [#1741](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1741), [#1637](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1637), [#1503](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1503), [#1498](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1498), [#1421](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1421), [#1224](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1224) | | USER | ❌ | [#2817](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2817), [#2662](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2662), [#1572](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1572), [#1535](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1535), [#1155](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1155) | | WAREHOUSE | ❌ | [#1844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1844), [#1104](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1104) | | FUNCTION | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), 
[#2426](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2426), [#1479](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1479), [#1393](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1393), [#1208](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1208), [#1079](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1079) |