diff --git a/docs/data-sources/app.md b/docs/data-sources/app.md
new file mode 100644
index 0000000000..b0b421c6a0
--- /dev/null
+++ b/docs/data-sources/app.md
@@ -0,0 +1,80 @@
+---
+subcategory: "Apps"
+---
+# databricks_app Data Source
+
+-> This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html).
+
+[Databricks Apps](https://docs.databricks.com/en/dev-tools/databricks-apps/index.html) run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.
+
+This data source allows you to fetch information about a Databricks App.
+
+## Example Usage
+
+```hcl
+data "databricks_app" "this" {
+  name = "my-custom-app"
+}
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `name` - The name of the app.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `app` attribute
+  * `name` - The name of the app.
+  * `description` - The description of the app.
+  * `resources` - A list of resources that the app has access to.
+  * `compute_status` attribute
+    * `state` - State of the app compute.
+    * `message` - Compute status message.
+  * `app_status` attribute
+    * `state` - State of the application.
+    * `message` - Application status message.
+  * `url` - The URL of the app once it is deployed.
+  * `create_time` - The creation time of the app.
+  * `creator` - The email of the user who created the app.
+  * `update_time` - The time of the last update of the app.
+  * `updater` - The email of the user who last updated the app.
+  * `service_principal_id` - The ID of the app service principal.
+  * `service_principal_name` - The name of the app service principal.
+  * `default_source_code_path` - The default workspace file system path of the source code from which app deployments are created. This field tracks the workspace source code path of the last active deployment.
+
+### resources Attribute
+
+This attribute describes a resource used by the app.
+
+* `name` - The name of the resource.
+* `description` - The description of the resource.
+
+Exactly one of the following attributes will be provided:
+
+* `secret` attribute
+  * `scope` - Scope of the secret to grant permission on.
+  * `key` - Key of the secret to grant permission on.
+  * `permission` - Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: `READ`, `WRITE`, `MANAGE`.
+* `sql_warehouse` attribute
+  * `id` - ID of the SQL warehouse to grant permission on.
+  * `permission` - Permission to grant on the SQL warehouse. Supported permissions are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER`.
+* `serving_endpoint` attribute
+  * `name` - Name of the serving endpoint to grant permission on.
+  * `permission` - Permission to grant on the serving endpoint. Supported permissions are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW`.
+* `job` attribute
+  * `id` - ID of the job to grant permission on.
+  * `permission` - Permissions to grant on the job. Supported permissions are: `CAN_MANAGE`, `IS_OWNER`, `CAN_MANAGE_RUN`, `CAN_VIEW`.
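+
+The exported attributes can be used like any other Terraform values. A minimal sketch that surfaces the deployed app's URL through an output (the output name is illustrative, and the app is assumed to already exist in the workspace):
+
+```hcl
+data "databricks_app" "this" {
+  name = "my-custom-app"
+}
+
+# Expose the URL under which the app is served once deployed.
+output "app_url" {
+  value = data.databricks_app.this.app.url
+}
+```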
+
+## Related Resources
+
+The following resources are used in the same context:
+
+* [databricks_app](../resources/app.md) to manage [Databricks Apps](https://docs.databricks.com/en/dev-tools/databricks-apps/index.html).
+* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
+* [databricks_model_serving](model_serving.md) to serve models on a Databricks serving endpoint.
+* [databricks_secret](secret.md) to manage [secrets](https://docs.databricks.com/security/secrets/index.html#secrets-user-guide) in a Databricks workspace.
+* [databricks_job](job.md) to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code.
diff --git a/docs/data-sources/apps.md b/docs/data-sources/apps.md
new file mode 100644
index 0000000000..82bd6403a6
--- /dev/null
+++ b/docs/data-sources/apps.md
@@ -0,0 +1,72 @@
+---
+subcategory: "Apps"
+---
+# databricks_apps Data Source
+
+-> This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html).
+
+[Databricks Apps](https://docs.databricks.com/en/dev-tools/databricks-apps/index.html) run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.
+
+This data source allows you to fetch information about all Databricks Apps within a workspace.
+
+## Example Usage
+
+```hcl
+data "databricks_apps" "all_apps" {}
+```
+
+## Attribute Reference
+
+The following attributes are exported:
+
+* `app` - A list of [databricks_app](../resources/app.md) resources.
+  * `name` - The name of the app.
+  * `description` - The description of the app.
+  * `resources` - A list of resources that the app has access to.
+  * `compute_status` attribute
+    * `state` - State of the app compute.
+    * `message` - Compute status message.
+  * `app_status` attribute
+    * `state` - State of the application.
+    * `message` - Application status message.
+  * `url` - The URL of the app once it is deployed.
+  * `create_time` - The creation time of the app.
+  * `creator` - The email of the user who created the app.
+  * `update_time` - The time of the last update of the app.
+  * `updater` - The email of the user who last updated the app.
+  * `service_principal_id` - The ID of the app service principal.
+  * `service_principal_name` - The name of the app service principal.
+  * `default_source_code_path` - The default workspace file system path of the source code from which app deployments are created. This field tracks the workspace source code path of the last active deployment.
+
+### resources Attribute
+
+This attribute describes a resource used by the app.
+
+* `name` - The name of the resource.
+* `description` - The description of the resource.
+
+Exactly one of the following attributes will be provided:
+
+* `secret` attribute
+  * `scope` - Scope of the secret to grant permission on.
+  * `key` - Key of the secret to grant permission on.
+  * `permission` - Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: `READ`, `WRITE`, `MANAGE`.
+* `sql_warehouse` attribute
+  * `id` - ID of the SQL warehouse to grant permission on.
+  * `permission` - Permission to grant on the SQL warehouse. Supported permissions are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER`.
+* `serving_endpoint` attribute
+  * `name` - Name of the serving endpoint to grant permission on.
+  * `permission` - Permission to grant on the serving endpoint. Supported permissions are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW`.
+* `job` attribute
+  * `id` - ID of the job to grant permission on.
+  * `permission` - Permissions to grant on the job. Supported permissions are: `CAN_MANAGE`, `IS_OWNER`, `CAN_MANAGE_RUN`, `CAN_VIEW`.
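+
+The returned list works with standard Terraform expressions. A minimal sketch building a map of app names to their URLs (the output name is illustrative), using the `app` list attribute exported by this data source:
+
+```hcl
+data "databricks_apps" "all_apps" {}
+
+# Inventory of all apps in the workspace, keyed by app name.
+output "app_urls" {
+  value = { for a in data.databricks_apps.all_apps.app : a.name => a.url }
+}
+```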
+
+## Related Resources
+
+The following resources are used in the same context:
+
+* [databricks_app](../resources/app.md) to manage [Databricks Apps](https://docs.databricks.com/en/dev-tools/databricks-apps/index.html).
+* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
+* [databricks_model_serving](model_serving.md) to serve models on a Databricks serving endpoint.
+* [databricks_secret](secret.md) to manage [secrets](https://docs.databricks.com/security/secrets/index.html#secrets-user-guide) in a Databricks workspace.
+* [databricks_job](job.md) to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code.
diff --git a/docs/resources/app.md b/docs/resources/app.md
new file mode 100644
index 0000000000..ab6ba214b9
--- /dev/null
+++ b/docs/resources/app.md
@@ -0,0 +1,114 @@
+---
+subcategory: "Apps"
+---
+# databricks_app Resource
+
+-> This feature is in [Public Preview](https://docs.databricks.com/release-notes/release-types.html).
+
+[Databricks Apps](https://docs.databricks.com/en/dev-tools/databricks-apps/index.html) run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on. This resource creates the application; app deployment itself is not handled here and should be done separately, e.g. as part of your CI/CD pipeline.
+
+## Example Usage
+
+```hcl
+resource "databricks_app" "this" {
+  name        = "my-custom-app"
+  description = "My app"
+  resources = [
+    {
+      name = "sql-warehouse"
+      sql_warehouse = {
+        id         = "e9ca293f79a74b5c"
+        permission = "CAN_MANAGE"
+      }
+    },
+    {
+      name = "serving-endpoint"
+      serving_endpoint = {
+        name       = "databricks-meta-llama-3-1-70b-instruct"
+        permission = "CAN_MANAGE"
+      }
+    },
+    {
+      name = "job"
+      job = {
+        id         = "1234"
+        permission = "CAN_MANAGE"
+      }
+    },
+  ]
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It must be unique within the workspace.
+* `description` - (Optional) The description of the app.
+* `resources` - (Optional) A list of resources that the app has access to.
+
+### resources Configuration Attribute
+
+This attribute describes a resource used by the app.
+
+* `name` - (Required) The name of the resource.
+* `description` - (Optional) The description of the resource.
+
+Exactly one of the following attributes must be provided:
+
+* `secret` attribute
+  * `scope` - Scope of the secret to grant permission on.
+  * `key` - Key of the secret to grant permission on.
+  * `permission` - Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission must be one of: `READ`, `WRITE`, `MANAGE`.
+* `sql_warehouse` attribute
+  * `id` - ID of the SQL warehouse to grant permission on.
+  * `permission` - Permission to grant on the SQL warehouse. Supported permissions are: `CAN_MANAGE`, `CAN_USE`, `IS_OWNER`.
+* `serving_endpoint` attribute
+  * `name` - Name of the serving endpoint to grant permission on.
+  * `permission` - Permission to grant on the serving endpoint. Supported permissions are: `CAN_MANAGE`, `CAN_QUERY`, `CAN_VIEW`.
+* `job` attribute
+  * `id` - ID of the job to grant permission on.
+  * `permission` - Permissions to grant on the job. Supported permissions are: `CAN_MANAGE`, `IS_OWNER`, `CAN_MANAGE_RUN`, `CAN_VIEW`.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `compute_status` attribute
+  * `state` - State of the app compute.
+  * `message` - Compute status message.
+* `app_status` attribute
+  * `state` - State of the application.
+  * `message` - Application status message.
+* `url` - The URL of the app once it is deployed.
+* `create_time` - The creation time of the app.
+* `creator` - The email of the user who created the app.
+* `update_time` - The time of the last update of the app.
+* `updater` - The email of the user who last updated the app.
+* `service_principal_id` - The ID of the app service principal.
+* `service_principal_name` - The name of the app service principal.
+* `default_source_code_path` - The default workspace file system path of the source code from which app deployments are created. This field tracks the workspace source code path of the last active deployment.
+
+## Import
+
+This resource can be imported by its name:
+
+```hcl
+import {
+  to = databricks_app.this
+  id = "<app_name>"
+}
+```
+
+or using the `terraform` CLI:
+
+```bash
+terraform import databricks_app.this <app_name>
+```
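+
+## Access Control
+
+Access to a deployed app can be managed with [databricks_permissions](permissions.md) via its `app_name` argument; apps support the `CAN_USE` and `CAN_MANAGE` permission levels. A minimal sketch (the group name is illustrative):
+
+```hcl
+resource "databricks_permissions" "app_usage" {
+  app_name = databricks_app.this.name
+
+  access_control {
+    group_name       = "Data Scientists"
+    permission_level = "CAN_USE"
+  }
+}
+```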
+
+## Related Resources
+
+The following resources are used in the same context:
+
+* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
+* [databricks_model_serving](model_serving.md) to serve models on a Databricks serving endpoint.
+* [databricks_secret](secret.md) to manage [secrets](https://docs.databricks.com/security/secrets/index.html#secrets-user-guide) in a Databricks workspace.
+* [databricks_job](job.md) to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code.
diff --git a/docs/resources/permissions.md b/docs/resources/permissions.md
index 9696df577e..ce3f30d639 100644
--- a/docs/resources/permissions.md
+++ b/docs/resources/permissions.md
@@ -423,7 +423,6 @@ Valid [permission levels](https://docs.databricks.com/security/access-control/wo
 A folder could be specified by using either `directory_path` or `directory_id` attribute. The value for the `directory_id` is the object ID of the resource in the Databricks Workspace that is exposed as `object_id` attribute of the `databricks_directory` resource as shown below.
 
-
 ```hcl
 resource "databricks_group" "auto" {
   display_name = "Automation"
 }
@@ -910,6 +909,7 @@ One type argument and at least one access control block argument are required.
 
 Exactly one of the following arguments is required:
 
+- `app_name` - [app](app.md) name
 - `cluster_id` - [cluster](cluster.md) id
 - `cluster_policy_id` - [cluster policy](cluster_policy.md) id
 - `instance_pool_id` - [instance pool](instance_pool.md) id
diff --git a/internal/acceptance/permissions_test.go b/internal/acceptance/permissions_test.go
index 254a8a8a0b..dca6a56260 100644
--- a/internal/acceptance/permissions_test.go
+++ b/internal/acceptance/permissions_test.go
@@ -947,3 +947,25 @@ func TestAccPermissions_Query(t *testing.T) {
 		ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for query, allowed levels: CAN_MANAGE"),
 	})
 }
+
+func TestAccPermissions_App(t *testing.T) {
+	loadDebugEnvIfRunsFromIDE(t, "workspace")
+	if IsGcp(t) {
+		Skipf(t)("not available on GCP")
+	}
+	appTemplate := `
+	resource "databricks_app" "this" {
+		name        = "{var.RANDOM}"
+		description = "Test app"
+	}`
+	WorkspaceLevel(t, Step{
+		Template: appTemplate + makePermissionsTestStage("app_name", "databricks_app.this.name", groupPermissions("CAN_USE")),
+	}, Step{
+		Template: appTemplate + makePermissionsTestStage("app_name", "databricks_app.this.name",
+			currentPrincipalPermission(t, "CAN_MANAGE"), groupPermissions("CAN_USE", "CAN_MANAGE")),
+	}, Step{
+		Template: appTemplate + makePermissionsTestStage("app_name", "databricks_app.this.name",
+			currentPrincipalPermission(t, "CAN_USE"), groupPermissions("CAN_USE", "CAN_MANAGE")),
+		ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for apps, allowed levels: CAN_MANAGE"),
+	})
+}
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
index 2a83954e39..d0f9c92083 100644
--- a/internal/providers/pluginfw/pluginfw_rollout_utils.go
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -12,6 +12,7 @@ import (
 	"slices"
 	"strings"
 
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/app"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/catalog"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/cluster"
 	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/products/library"
@@ -26,29 +27,35 @@ import (
 )
 
 // List of resources that have been migrated from SDK V2 to plugin framework
+// Keep this list sorted.
 var migratedResources = []func() resource.Resource{
-	qualitymonitor.ResourceQualityMonitor,
 	library.ResourceLibrary,
+	qualitymonitor.ResourceQualityMonitor,
 }
 
 // List of data sources that have been migrated from SDK V2 to plugin framework
+// Keep this list sorted.
 var migratedDataSources = []func() datasource.DataSource{
 	volume.DataSourceVolumes,
 }
 
 // List of resources that have been onboarded to the plugin framework - not migrated from sdkv2.
+// Keep this list sorted.
 var pluginFwOnlyResources = []func() resource.Resource{
-	// TODO Add resources here
-	sharing.ResourceShare, // Using the staging name (with pluginframework suffix)
+	app.ResourceApp,
+	sharing.ResourceShare,
 }
 
 // List of data sources that have been onboarded to the plugin framework - not migrated from sdkv2.
+// Keep this list sorted.
 var pluginFwOnlyDataSources = []func() datasource.DataSource{
-	serving.DataSourceServingEndpoints,
+	app.DataSourceApp,
+	app.DataSourceApps,
+	catalog.DataSourceFunctions,
+	notificationdestinations.DataSourceNotificationDestinations,
 	registered_model.DataSourceRegisteredModel,
 	registered_model.DataSourceRegisteredModelVersions,
-	notificationdestinations.DataSourceNotificationDestinations,
-	catalog.DataSourceFunctions,
+	serving.DataSourceServingEndpoints,
 	// TODO: Add DataSourceCluster into migratedDataSources after fixing unit tests.
 	cluster.DataSourceCluster, // Using the staging name (with pluginframework suffix)
 	sharing.DataSourceShare,   // Using the staging name (with pluginframework suffix)
diff --git a/internal/providers/pluginfw/products/app/data_app.go b/internal/providers/pluginfw/products/app/data_app.go
new file mode 100644
index 0000000000..b49b43a34f
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/data_app.go
@@ -0,0 +1,85 @@
+package app
+
+import (
+	"context"
+	"reflect"
+
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
+	"github.com/databricks/terraform-provider-databricks/internal/service/apps_tf"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/path"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func DataSourceApp() datasource.DataSource {
+	return &dataSourceApp{}
+}
+
+type dataSourceApp struct {
+	client *common.DatabricksClient
+}
+
+// dataApp is the state of the databricks_app data source: the requested name
+// and the fetched app as a nested computed object.
+type dataApp struct {
+	Name types.String `tfsdk:"name"`
+	App  types.Object `tfsdk:"app" tf:"computed"`
+}
+
+func (dataApp) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
+	return map[string]reflect.Type{
+		"app": reflect.TypeOf(apps_tf.App{}),
+	}
+}
+
+func (a dataSourceApp) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
+}
+
+func (a dataSourceApp) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+	resp.Schema = tfschema.DataSourceStructToSchema(ctx, dataApp{}, func(cs tfschema.CustomizableSchema) tfschema.CustomizableSchema {
+		return cs
+	})
+}
+
+func (a *dataSourceApp) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+	if a.client == nil && req.ProviderData != nil {
+		a.client = pluginfwcommon.ConfigureDataSource(req, resp)
+	}
+}
+
+func (a *dataSourceApp) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, resourceName)
+	w, diags := a.client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var name types.String
+	resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("name"), &name)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	appGoSdk, err := w.Apps.GetByName(ctx, name.ValueString())
+	if err != nil {
+		resp.Diagnostics.AddError("failed to read app", err.Error())
+		return
+	}
+
+	var newApp apps_tf.App
+	resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, appGoSdk, &newApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+	dataApp := dataApp{Name: name, App: newApp.ToObjectValue(ctx)}
+	resp.Diagnostics.Append(resp.State.Set(ctx, dataApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+var _ datasource.DataSourceWithConfigure = &dataSourceApp{}
diff --git a/internal/providers/pluginfw/products/app/data_app_acc_test.go b/internal/providers/pluginfw/products/app/data_app_acc_test.go
new file mode 100644
index 0000000000..024bd36ad9
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/data_app_acc_test.go
@@ -0,0 +1,46 @@
+package app_test
+
+import (
+	"testing"
+
+	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
+)
+
+const fastApp = `
+	resource "databricks_secret_scope" "this" {
+		name = "tf-{var.STICKY_RANDOM}"
+	}
+
+	resource "databricks_secret" "this" {
+		scope        = databricks_secret_scope.this.name
+		key          = "tf-{var.STICKY_RANDOM}"
+		string_value = "secret"
+	}
+
+	resource "databricks_app" "this" {
+		name        = "{var.STICKY_RANDOM}"
+		description = "Test app"
+		resources = [{
+			name        = "secret"
+			description = "secret for app"
+			secret = {
+				scope      = databricks_secret_scope.this.name
+				key        = databricks_secret.this.key
+				permission = "MANAGE"
+			}
+		}]
+	}`
+
+func TestAccAppDataSource(t *testing.T) {
+	acceptance.LoadWorkspaceEnv(t)
+	if acceptance.IsGcp(t) {
+		acceptance.Skipf(t)("not available on GCP")
+	}
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: fastApp + `
+		data "databricks_app" "this" {
+			name = databricks_app.this.name
+		}
+		`,
+	})
+}
diff --git a/internal/providers/pluginfw/products/app/data_apps.go b/internal/providers/pluginfw/products/app/data_apps.go
new file mode 100644
index 0000000000..e5cbc25921
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/data_apps.go
@@ -0,0 +1,83 @@
+package app
+
+import (
+	"context"
+	"reflect"
+
+	"github.com/databricks/databricks-sdk-go/service/apps"
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
+	"github.com/databricks/terraform-provider-databricks/internal/service/apps_tf"
+	"github.com/hashicorp/terraform-plugin-framework/attr"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func DataSourceApps() datasource.DataSource {
+	return &dataSourceApps{}
+}
+
+type dataSourceApps struct {
+	client *common.DatabricksClient
+}
+
+type dataApps struct {
+	Apps types.List `tfsdk:"app" tf:"computed"`
+}
+
+func (dataApps) GetComplexFieldTypes(context.Context) map[string]reflect.Type {
+	return map[string]reflect.Type{
+		"app": reflect.TypeOf(apps_tf.App{}),
+	}
+}
+
+func (a dataSourceApps) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceNamePlural)
+}
+
+func (a dataSourceApps) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+	resp.Schema = tfschema.DataSourceStructToSchema(ctx, dataApps{}, func(cs tfschema.CustomizableSchema) tfschema.CustomizableSchema {
+		return cs
+	})
+}
+
+func (a *dataSourceApps) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+	if a.client == nil && req.ProviderData != nil {
+		a.client = pluginfwcommon.ConfigureDataSource(req, resp)
+	}
+}
+
+func (a *dataSourceApps) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, resourceName)
+	w, diags := a.client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	appsGoSdk, err := w.Apps.ListAll(ctx, apps.ListAppsRequest{})
+	if err != nil {
+		resp.Diagnostics.AddError("failed to list apps", err.Error())
+		return
+	}
+
+	appValues := []attr.Value{}
+	for _, appGoSdk := range appsGoSdk {
+		app := apps_tf.App{}
+		resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, appGoSdk, &app)...)
+		if resp.Diagnostics.HasError() {
+			return
+		}
+		appValues = append(appValues, app.ToObjectValue(ctx))
+	}
+	dataApps := dataApps{Apps: types.ListValueMust(apps_tf.App{}.Type(ctx), appValues)}
+	resp.Diagnostics.Append(resp.State.Set(ctx, dataApps)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+var _ datasource.DataSourceWithConfigure = &dataSourceApps{}
diff --git a/internal/providers/pluginfw/products/app/data_apps_acc_test.go b/internal/providers/pluginfw/products/app/data_apps_acc_test.go
new file mode 100644
index 0000000000..2719f708c4
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/data_apps_acc_test.go
@@ -0,0 +1,19 @@
+package app_test
+
+import (
+	"testing"
+
+	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
+)
+
+func TestAccAppsDataSource(t *testing.T) {
+	acceptance.LoadWorkspaceEnv(t)
+	if acceptance.IsGcp(t) {
+		acceptance.Skipf(t)("not available on GCP")
+	}
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: `
+		data "databricks_apps" "this" { }
+		`,
+	})
+}
diff --git a/internal/providers/pluginfw/products/app/resource_app.go b/internal/providers/pluginfw/products/app/resource_app.go
new file mode 100644
index 0000000000..9a6fe6826f
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/resource_app.go
@@ -0,0 +1,225 @@
+package app
+
+import (
+	"context"
+
+	"github.com/databricks/databricks-sdk-go/apierr"
+	"github.com/databricks/databricks-sdk-go/service/apps"
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
+	"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
+	"github.com/databricks/terraform-provider-databricks/internal/service/apps_tf"
+	"github.com/hashicorp/terraform-plugin-framework-validators/objectvalidator"
+	"github.com/hashicorp/terraform-plugin-framework/path"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+)
+
+const (
"app" + resourceNamePlural = "apps" +) + +func ResourceApp() resource.Resource { + return &resourceApp{} +} + +type resourceApp struct { + client *common.DatabricksClient +} + +func (a resourceApp) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) +} + +func (a resourceApp) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = tfschema.ResourceStructToSchema(ctx, apps_tf.App{}, func(cs tfschema.CustomizableSchema) tfschema.CustomizableSchema { + cs.AddPlanModifier(stringplanmodifier.RequiresReplace(), "name") + // Computed fields + for _, p := range []string{ + "active_deployment", + "app_status", + "compute_status", + "create_time", + "creator", + "default_source_code_path", + "pending_deployment", + "service_principal_client_id", + "service_principal_id", + "service_principal_name", + "update_time", + "updater", + "url", + } { + cs.SetReadOnly(p) + } + exclusiveFields := []string{"job", "secret", "serving_endpoint", "sql_warehouse"} + paths := path.Expressions{} + for _, field := range exclusiveFields[1:] { + paths = append(paths, path.MatchRelative().AtParent().AtName(field)) + } + cs.AddValidator(objectvalidator.ExactlyOneOf(paths...), "resources", exclusiveFields[0]) + return cs + }) +} + +func (a *resourceApp) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + if a.client == nil && req.ProviderData != nil { + a.client = pluginfwcommon.ConfigureResource(req, resp) + } +} + +func (a *resourceApp) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + w, diags := a.client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var app apps_tf.App + resp.Diagnostics.Append(req.Plan.Get(ctx, &app)...) + if resp.Diagnostics.HasError() { + return + } + var appGoSdk apps.App + resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, app, &appGoSdk)...) + if resp.Diagnostics.HasError() { + return + } + + // Create the app + waiter, err := w.Apps.Create(ctx, apps.CreateAppRequest{App: &appGoSdk}) + if err != nil { + resp.Diagnostics.AddError("failed to create app", err.Error()) + return + } + + // Store the initial version of the app in state + var newApp apps_tf.App + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, waiter.Response, &newApp)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, newApp)...) + if resp.Diagnostics.HasError() { + return + } + + // Wait for the app to be created + finalApp, err := waiter.Get() + if err != nil { + resp.Diagnostics.AddError("error waiting for app to be ready", err.Error()) + return + } + + // Store the final version of the app in state + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, finalApp, &newApp)...) + if resp.Diagnostics.HasError() { + return + } + resp.Diagnostics.Append(resp.State.Set(ctx, newApp)...) + if resp.Diagnostics.HasError() { + return + } +} + +func (a *resourceApp) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) + w, diags := a.client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) 
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var app apps_tf.App
+	resp.Diagnostics.Append(req.State.Get(ctx, &app)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	appGoSdk, err := w.Apps.GetByName(ctx, app.Name.ValueString())
+	if err != nil {
+		resp.Diagnostics.AddError("failed to read app", err.Error())
+		return
+	}
+
+	var newApp apps_tf.App
+	resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, appGoSdk, &newApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+	resp.Diagnostics.Append(resp.State.Set(ctx, newApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+func (a *resourceApp) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+	w, diags := a.client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var app apps_tf.App
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &app)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Update the app
+	var appGoSdk apps.App
+	resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, app, &appGoSdk)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+	updatedApp, err := w.Apps.Update(ctx, apps.UpdateAppRequest{App: &appGoSdk, Name: app.Name.ValueString()})
+	if err != nil {
+		resp.Diagnostics.AddError("failed to update app", err.Error())
+		return
+	}
+
+	// Store the updated version of the app, as returned by the server, in state
+	var newApp apps_tf.App
+	resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, updatedApp, &newApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+	resp.Diagnostics.Append(resp.State.Set(ctx, newApp)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+}
+
+func (a *resourceApp) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+	w, diags := a.client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var app apps_tf.App
+	resp.Diagnostics.Append(req.State.Get(ctx, &app)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Delete the app
+	_, err := w.Apps.DeleteByName(ctx, app.Name.ValueString())
+	if err != nil && !apierr.IsMissing(err) {
+		resp.Diagnostics.AddError("failed to delete app", err.Error())
+		return
+	}
+}
+
+func (a *resourceApp) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	resource.ImportStatePassthroughID(ctx, path.Root("name"), req, resp)
+}
+
+var _ resource.ResourceWithConfigure = &resourceApp{}
+var _ resource.ResourceWithImportState = &resourceApp{}
diff --git a/internal/providers/pluginfw/products/app/resource_app_acc_test.go b/internal/providers/pluginfw/products/app/resource_app_acc_test.go
new file mode 100644
index 0000000000..fbbcd7838d
--- /dev/null
+++ b/internal/providers/pluginfw/products/app/resource_app_acc_test.go
@@ -0,0 +1,137 @@
+package app_test
+
+import (
+	"fmt"
+	"regexp"
+	"testing"
+
+	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
+)
+
+const baseResources = `
+	resource "databricks_secret_scope" "this" {
+		name = "tf-{var.STICKY_RANDOM}"
+	}
+
+	resource "databricks_secret" "this" {
+		scope        = databricks_secret_scope.this.name
+		key          = "tf-{var.STICKY_RANDOM}"
+		string_value = "secret"
+	}
+
+	resource "databricks_sql_endpoint" "this" {
+		name             = "tf-{var.STICKY_RANDOM}"
+		cluster_size     = "2X-Small"
+		max_num_clusters = 1
+
+		tags {
+			custom_tags {
+				key   = "Owner"
+				value = "eng-dev-ecosystem-team_at_databricks.com"
+			}
+		}
+	}
+
+	resource "databricks_job" "this" {
+		name = "tf-{var.STICKY_RANDOM}"
+	}
+
+	resource "databricks_model_serving" "this" {
+		name = "tf-{var.STICKY_RANDOM}"
+		config {
+			served_models {
+				name                  = "prod_model"
+				model_name            = "experiment-fixture-model"
+				model_version         = "1"
+				workload_size         = "Small"
+				scale_to_zero_enabled = true
+			}
+		}
+	}
+`
+
+func makeTemplate(description string) string {
+	appTemplate := baseResources + `
+	resource "databricks_app" "this" {
+		name        = "{var.STICKY_RANDOM}"
+		description = "%s"
+		resources = [{
+			name        = "secret"
+			description = "secret for app"
+			secret = {
+				scope      = databricks_secret_scope.this.name
+				key        = databricks_secret.this.key
+				permission = "MANAGE"
+			}
+		}, {
+			name        = "job"
+			description = "job for app"
+			job = {
+				id         = databricks_job.this.id
+				permission = "CAN_MANAGE"
+			}
+		}, {
+			name        = "serving endpoint"
+			description = "serving endpoint for app"
+			serving_endpoint = {
+				name       = databricks_model_serving.this.name
+				permission = "CAN_MANAGE"
+			}
+		}, {
+			name        = "sql warehouse"
+			description = "sql warehouse for app"
+			sql_warehouse = {
+				id         = databricks_sql_endpoint.this.id
+				permission = "CAN_MANAGE"
+			}
+		}]
+	}`
+	return fmt.Sprintf(appTemplate, description)
+}
+
+var templateWithInvalidResource = `
+	resource "databricks_app" "this" {
+		name        = "{var.STICKY_RANDOM}"
+		description = "My app"
+		resources = [{
+			name        = "invalid resource"
+			description = "invalid resource for app"
+			secret = {
+				permission = "CAN_MANAGE"
+				key        = "test"
+				scope      = "test"
+			}
+			sql_warehouse = {
+				id         = "123"
+				permission = "CAN_MANAGE"
+			}
+		}]
+	}`
+
+func TestAccApp_InvalidResource(t *testing.T) {
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: templateWithInvalidResource,
+		ExpectError: regexp.MustCompile(regexp.QuoteMeta(`2 attributes specified when one (and only one) of
+[resources[0].job.<.secret,resources[0].job.<.serving_endpoint,resources[0].job.<.sql_warehouse]
+is required`)),
+	})
+}
+
+func TestAccAppResource(t *testing.T) {
+	acceptance.LoadWorkspaceEnv(t)
+	if acceptance.IsGcp(t) {
+		acceptance.Skipf(t)("not available on GCP")
+	}
+	acceptance.WorkspaceLevel(t, acceptance.Step{
+		Template: makeTemplate("My app"),
+	}, acceptance.Step{
+		Template: makeTemplate("My new app"),
+	}, acceptance.Step{
+		ImportState:       true,
+		ResourceName:      "databricks_app.this",
+		ImportStateIdFunc: acceptance.BuildImportStateIdFunc("databricks_app.this", "name"),
+		// ImportStateVerify is disabled because computed fields don't appear to be filled in during import.
+		// ImportStateVerify: true,
+		ImportStateVerifyIdentifierAttribute: "name",
+	})
+}
diff --git a/permissions/permission_definitions.go b/permissions/permission_definitions.go
index 398b032a64..1c6e8537cf 100644
--- a/permissions/permission_definitions.go
+++ b/permissions/permission_definitions.go
@@ -743,5 +743,16 @@ func allResourcePermissions() []resourcePermissions {
 			updateAclCustomizers: []update.ACLCustomizer{update.AddCurrentUserAsManage},
 			deleteAclCustomizers: []update.ACLCustomizer{update.AddCurrentUserAsManage},
 		},
+		{
+			field:             "app_name",
+			objectType:        "apps",
+			requestObjectType: "apps",
+			allowedPermissionLevels: map[string]permissionLevelOptions{
+				"CAN_USE":    {isManagementPermission: false},
+				"CAN_MANAGE": {isManagementPermission: true},
+			},
+			updateAclCustomizers: []update.ACLCustomizer{update.AddCurrentUserAsManage},
+			deleteAclCustomizers: []update.ACLCustomizer{update.AddCurrentUserAsManage},
+		},
 	}
 }
diff --git a/permissions/resource_permissions_test.go b/permissions/resource_permissions_test.go
index 983b59fbc1..12b3219acf 100644
--- a/permissions/resource_permissions_test.go
+++ b/permissions/resource_permissions_test.go
@@ -593,7 +593,7 @@ func TestResourcePermissionsCreate_invalid(t *testing.T) {
 	qa.ResourceFixture{
 		Resource: ResourcePermissions(),
 		Create:   true,
-	}.ExpectError(t, "at least one type of resource identifier must be set; allowed fields: authorization, cluster_id, cluster_policy_id, dashboard_id, directory_id, directory_path, experiment_id, instance_pool_id, job_id, notebook_id, notebook_path, pipeline_id, registered_model_id, repo_id, repo_path, serving_endpoint_id, sql_alert_id, sql_dashboard_id, sql_endpoint_id, sql_query_id, vector_search_endpoint_id, workspace_file_id, workspace_file_path")
+	}.ExpectError(t, "at least one type of resource identifier must be set; allowed fields: app_name, authorization, cluster_id, cluster_policy_id, dashboard_id, directory_id, directory_path, experiment_id, instance_pool_id, job_id, notebook_id, notebook_path, pipeline_id, registered_model_id, repo_id, repo_path, serving_endpoint_id, sql_alert_id, sql_dashboard_id, sql_endpoint_id, sql_query_id, vector_search_endpoint_id, workspace_file_id, workspace_file_path")
 }
 
 func TestResourcePermissionsCreate_no_access_control(t *testing.T) {