diff --git a/mws/acceptance/mws_private_access_settings_test.go b/mws/acceptance/mws_private_access_settings_test.go index d0c0a6e1b4..c73b082aff 100644 --- a/mws/acceptance/mws_private_access_settings_test.go +++ b/mws/acceptance/mws_private_access_settings_test.go @@ -18,7 +18,7 @@ func TestMwsAccPrivateAccessSettings(t *testing.T) { resource "databricks_mws_private_access_settings" "this" { account_id = "{env.DATABRICKS_ACCOUNT_ID}" private_access_settings_name = "tf-{var.RANDOM}" - region = "{env.TEST_REGION}" + region = "{env.AWS_REGION}" }`, }, }) diff --git a/mws/acceptance/mws_vpc_endpoint_test.go b/mws/acceptance/mws_vpc_endpoint_test.go index 440e04591e..065e2691b6 100644 --- a/mws/acceptance/mws_vpc_endpoint_test.go +++ b/mws/acceptance/mws_vpc_endpoint_test.go @@ -18,9 +18,9 @@ func TestMwsAccVpcEndpoint(t *testing.T) { resource "databricks_mws_vpc_endpoint" "this" { account_id = "{env.DATABRICKS_ACCOUNT_ID}" vpc_endpoint_name = "tf-{var.RANDOM}" - region = "{env.TEST_REGION}" + region = "{env.AWS_REGION}" aws_vpc_endpoint_id = "{env.TEST_RELAY_VPC_ENDPOINT}" - aws_account_id = "{env.TEST_AWS_ACCOUNT_ID}" + aws_account_id = "{env.AWS_ACCOUNT_ID}" }`, }, }) diff --git a/mws/acceptance/mws_workspaces_test.go b/mws/acceptance/mws_workspaces_test.go index e39614206c..944c59dffb 100644 --- a/mws/acceptance/mws_workspaces_test.go +++ b/mws/acceptance/mws_workspaces_test.go @@ -49,7 +49,7 @@ func TestMwsAccWorkspaces(t *testing.T) { account_id = "{env.DATABRICKS_ACCOUNT_ID}" workspace_name = "terra-{var.RANDOM}" deployment_name = "terra-{var.RANDOM}" - aws_region = "{env.TEST_REGION}" + aws_region = "{env.AWS_REGION}" credentials_id = databricks_mws_credentials.this.credentials_id storage_configuration_id = databricks_mws_storage_configurations.this.storage_configuration_id diff --git a/mws/resource_mws_private_access_settings_test.go b/mws/resource_mws_private_access_settings_test.go index 00bedfceae..5b51a113ad 100644 --- a/mws/resource_mws_private_access_settings_test.go +++ b/mws/resource_mws_private_access_settings_test.go @@ -17,7 +17,7 @@ func TestMwsAccPAS(t *testing.T) { t.Skip("skipping integration test in short mode.") } acctID := qa.GetEnvOrSkipTest(t, "DATABRICKS_ACCOUNT_ID") - awsRegion := qa.GetEnvOrSkipTest(t, "TEST_REGION") + awsRegion := qa.GetEnvOrSkipTest(t, "AWS_REGION") client := common.CommonEnvironmentClient() ctx := context.Background() pasAPI := NewPrivateAccessSettingsAPI(ctx, client) diff --git a/mws/resource_mws_vpc_endpoint_test.go b/mws/resource_mws_vpc_endpoint_test.go index b9797419cd..9914fc0b9c 100644 --- a/mws/resource_mws_vpc_endpoint_test.go +++ b/mws/resource_mws_vpc_endpoint_test.go @@ -20,7 +20,7 @@ func TestMwsAccVPCEndpointIntegration(t *testing.T) { } acctID := qa.GetEnvOrSkipTest(t, "DATABRICKS_ACCOUNT_ID") awsvreID := qa.GetEnvOrSkipTest(t, "TEST_REST_API_VPC_ENDPOINT") - awsRegion := qa.GetEnvOrSkipTest(t, "TEST_REGION") + awsRegion := qa.GetEnvOrSkipTest(t, "AWS_REGION") client := common.CommonEnvironmentClient() ctx := context.Background() vpcEndpointAPI := NewVPCEndpointAPI(ctx, client) diff --git a/scripts/README.md b/scripts/README.md index ded4f95360..fa5821b7b0 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -11,7 +11,7 @@ By default, we don't encourage creation/destruction of infrastructure multiple t * `azcli` - Azure authenticated with `az login` command. No `require_env` file needed. Runnable test name prefixes are `TestAcc` and `TestAzureAcc`. By far, the simplest way to develop provider's functionality. 
* `azsp` - Azure authenticated with Service Principal's ID/Secret pairs. Runnable test name prefixes are `TestAcc` and `TestAzureAcc`. The service principal must have the `Storage Blob Data Contributor` role on the ADLS account used. `ARM_SUBSCRIPTION_ID`, `ARM_CLIENT_SECRET`, `ARM_CLIENT_ID`, `ARM_TENANT_ID`, `OWNER` environment vars are required. Note that these integration tests use service-principal-based auth; even so, a personal access token is still generated to create resources.
-* `mws` - AWS with Databricks Multiworkspace API. Runnable test name prefix is `TestMws`. Please [check if you're able to use it](https://docs.databricks.com/administration-guide/multiworkspace/new-workspace-aws.html). Required variables are `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_USERNAME`, `DATABRICKS_PASSWORD` (something you use for https://accounts.cloud.databricks.com/), `TEST_REGION`, `TEST_CIDR`, `OWNER`. Only multiworkspace resources are tested.
+* `mws` - AWS with Databricks Multiworkspace API. Runnable test name prefix is `TestMws`. Please [check if you're able to use it](https://docs.databricks.com/administration-guide/multiworkspace/new-workspace-aws.html). Required variables are `DATABRICKS_ACCOUNT_ID`, `DATABRICKS_USERNAME`, `DATABRICKS_PASSWORD` (something you use for https://accounts.cloud.databricks.com/), `AWS_REGION`, `TEST_CIDR`, `OWNER`. Only multiworkspace resources are tested.
* `awsst` - `DATABRICKS_CONFIG_PROFILE` (section within Databricks CLI `~/.databrickscfg` file) & `CLOUD_ENV=AWS`. Use this if you want to test the provider on an existing development single-tenant shard. Runnable test name prefixes are `TestAcc` and `TestAwsAcc`.
* `awsmt` - AWS with Databricks Multitenant Workspace. Currently a work in progress; the test environment cannot be fully started.
* most of the tests should aim to be cloud-agnostic. Though, if specific branching is needed, you can check the `CLOUD_ENV` value (possible values are `Azure`, `AWS` & `MWS`).
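As context for the `TEST_REGION` → `AWS_REGION` rename, here is a minimal, self-contained sketch of the read-or-skip pattern the affected tests follow. The `getEnvOrSkip` helper and the test body are illustrative stand-ins; the real tests call `qa.GetEnvOrSkipTest` as shown in the hunks above.

```go
package mws_test

import (
	"os"
	"testing"
)

// getEnvOrSkip is an illustrative stand-in for qa.GetEnvOrSkipTest:
// return the variable's value, or skip the test when it is unset.
func getEnvOrSkip(t *testing.T, name string) string {
	value := os.Getenv(name)
	if value == "" {
		t.Skipf("environment variable %s is not set", name)
	}
	return value
}

func TestEnvGatedExample(t *testing.T) {
	// After this change, the MWS tests read AWS_REGION instead of
	// TEST_REGION, so the standard AWS variable gates the run.
	awsRegion := getEnvOrSkip(t, "AWS_REGION")
	t.Logf("running against region %s", awsRegion)
}
```

Standardizing on `AWS_REGION` also lets the tests reuse the variable the AWS CLI and SDKs already honor, instead of a test-specific one.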
diff --git a/scripts/awsmt-integration/require_env b/scripts/awsmt-integration/require_env index 08956930a8..1bc7519eb3 100644 --- a/scripts/awsmt-integration/require_env +++ b/scripts/awsmt-integration/require_env @@ -1,6 +1,6 @@ DATABRICKS_ACCOUNT_ID DATABRICKS_USERNAME DATABRICKS_PASSWORD -TEST_REGION +AWS_REGION TEST_CIDR OWNER \ No newline at end of file diff --git a/scripts/mws-integration/require_env b/scripts/mws-integration/require_env index 08956930a8..1bc7519eb3 100644 --- a/scripts/mws-integration/require_env +++ b/scripts/mws-integration/require_env @@ -1,6 +1,6 @@ DATABRICKS_ACCOUNT_ID DATABRICKS_USERNAME DATABRICKS_PASSWORD -TEST_REGION +AWS_REGION TEST_CIDR OWNER \ No newline at end of file diff --git a/storage/acceptance/azure_adls_gen1_mount_test.go b/storage/acceptance/azure_adls_gen1_mount_test.go deleted file mode 100644 index 584bf84ea8..0000000000 --- a/storage/acceptance/azure_adls_gen1_mount_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package acceptance - -import ( - "os" - "testing" - - "github.com/databricks/terraform-provider-databricks/common" - "github.com/databricks/terraform-provider-databricks/internal/acceptance" - "github.com/databricks/terraform-provider-databricks/storage" - - "github.com/databricks/terraform-provider-databricks/qa" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" -) - -func TestAzureAccAdlsGen1Mount_correctly_mounts(t *testing.T) { - if _, ok := os.LookupEnv("CLOUD_ENV"); !ok { - t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set") - } - if !common.CommonEnvironmentClient().IsAzureClientSecretSet() { - t.Skip("Test is meant only for client-secret conf Azure") - } - acceptance.AccTest(t, resource.TestCase{ - Steps: []resource.TestStep{ - { - Config: acceptance.EnvironmentTemplate(t, ` - resource "databricks_secret_scope" "terraform" { - name = "terraform-{var.RANDOM}" - initial_manage_principal = "users" - } - resource "databricks_secret" "client_secret" { - key = "datalake_sp_secret" - string_value = "{env.ARM_CLIENT_SECRET}" - scope = databricks_secret_scope.terraform.name - } - resource "databricks_azure_adls_gen1_mount" "mount" { - storage_resource_name = "{env.TEST_DATA_LAKE_STORE_NAME}" - mount_name = "localdir{var.RANDOM}" - tenant_id = "{env.ARM_TENANT_ID}" - client_id = "{env.ARM_CLIENT_ID}" - client_secret_scope = databricks_secret_scope.terraform.name - client_secret_key = databricks_secret.client_secret.key - }`), - }, - }, - }) -} - -func TestAzureAccADLSv1Mount(t *testing.T) { - client, mp := mountPointThroughReusedCluster(t) - if !client.IsAzureClientSecretSet() { - t.Skip("Test is meant only for client-secret conf Azure") - } - storageResource := qa.GetEnvOrSkipTest(t, "TEST_DATA_LAKE_STORE_NAME") - testWithNewSecretScope(t, func(scope, key string) { - testMounting(t, mp, storage.AzureADLSGen1Mount{ - ClientID: client.AzureClientID, - TenantID: client.AzureTenantID, - PrefixType: "dfs.adls", - StorageResource: storageResource, - Directory: "/", - SecretScope: scope, - SecretKey: key, - }) - }, client, mp.Name, client.AzureClientSecret) -} diff --git a/storage/acceptance/mount_test.go b/storage/acceptance/mount_test.go index 718236bf9b..03e7b74487 100644 --- a/storage/acceptance/mount_test.go +++ b/storage/acceptance/mount_test.go @@ -50,27 +50,6 @@ func TestAwsAccS3MountGeneric(t *testing.T) { }) } -func TestAzureAccADLSv1MountGeneric(t *testing.T) { - client, mp := mountPointThroughReusedCluster(t) - if !client.IsAzureClientSecretSet() { - t.Skip("Test is meant only for client-secret conf Azure") 
- } - storageResource := qa.GetEnvOrSkipTest(t, "TEST_DATA_LAKE_STORE_NAME") - testWithNewSecretScope(t, func(scope, key string) { - testMounting(t, mp, - storage.GenericMount{ - Adl: &storage.AzureADLSGen1MountGeneric{ - ClientID: client.AzureClientID, - TenantID: client.AzureTenantID, - PrefixType: "dfs.adls", - StorageResource: storageResource, - Directory: "/", - SecretScope: scope, - SecretKey: key, - }}) - }, client, mp.Name, client.AzureClientSecret) -} - func TestAzureAccADLSv2MountGeneric(t *testing.T) { client, mp := mountPointThroughReusedCluster(t) if !client.IsAzureClientSecretSet() {