diff --git a/provider/provider.go b/provider/provider.go
index 6c0b78ff..48f86fe9 100644
--- a/provider/provider.go
+++ b/provider/provider.go
@@ -29,9 +29,9 @@ import (
 	"github.com/cloudera/terraform-provider-cdp/resources/datahub"
 	"github.com/cloudera/terraform-provider-cdp/resources/datalake"
 	"github.com/cloudera/terraform-provider-cdp/resources/de"
-	"github.com/cloudera/terraform-provider-cdp/resources/dw"
 	dwaws "github.com/cloudera/terraform-provider-cdp/resources/dw/cluster/aws"
 	dwdatabasecatalog "github.com/cloudera/terraform-provider-cdp/resources/dw/databasecatalog"
+	"github.com/cloudera/terraform-provider-cdp/resources/dw/virtualwarehouse/hive"
 	"github.com/cloudera/terraform-provider-cdp/resources/environments"
 	"github.com/cloudera/terraform-provider-cdp/resources/iam"
 	"github.com/cloudera/terraform-provider-cdp/resources/ml"
@@ -249,7 +249,7 @@ func (p *CdpProvider) Resources(_ context.Context) []func() resource.Resource {
 		opdb.NewDatabaseResource,
 		ml.NewWorkspaceResource,
 		de.NewServiceResource,
-		dw.NewHiveResource,
+		hive.NewHiveResource,
 		dwaws.NewDwClusterResource,
 		dwdatabasecatalog.NewDwDatabaseCatalogResource,
 	}
diff --git a/provider/provider_test.go b/provider/provider_test.go
index 9b1ae792..1bf01a12 100644
--- a/provider/provider_test.go
+++ b/provider/provider_test.go
@@ -29,9 +29,9 @@ import (
 	"github.com/cloudera/terraform-provider-cdp/resources/datahub"
 	"github.com/cloudera/terraform-provider-cdp/resources/datalake"
 	"github.com/cloudera/terraform-provider-cdp/resources/de"
-	"github.com/cloudera/terraform-provider-cdp/resources/dw"
 	dwaws "github.com/cloudera/terraform-provider-cdp/resources/dw/cluster/aws"
 	dwdatabasecatalog "github.com/cloudera/terraform-provider-cdp/resources/dw/databasecatalog"
+	"github.com/cloudera/terraform-provider-cdp/resources/dw/virtualwarehouse/hive"
 	"github.com/cloudera/terraform-provider-cdp/resources/environments"
 	"github.com/cloudera/terraform-provider-cdp/resources/iam"
 	"github.com/cloudera/terraform-provider-cdp/resources/ml"
@@ -635,7 +635,7 @@ func TestCdpProvider_Resources(t *testing.T) {
 		opdb.NewDatabaseResource,
 		ml.NewWorkspaceResource,
 		de.NewServiceResource,
-		dw.NewHiveResource,
+		hive.NewHiveResource,
 		dwaws.NewDwClusterResource,
 		dwdatabasecatalog.NewDwDatabaseCatalogResource,
 	}
diff --git a/resources/dw/model_hive_vw.go b/resources/dw/model_hive_vw.go
deleted file mode 100644
index b5838453..00000000
--- a/resources/dw/model_hive_vw.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2024 Cloudera. All Rights Reserved.
-//
-// This file is licensed under the Apache License Version 2.0 (the "License").
-// You may not use this file except in compliance with the License.
-// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
-//
-// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-// OF ANY KIND, either express or implied. Refer to the License for the specific
-// permissions and limitations governing your use of the file.
-
-package dw
-
-import "github.com/hashicorp/terraform-plugin-framework/types"
-
-type hiveResourceModel struct {
-	ID          types.String `tfsdk:"id"`
-	ClusterID   types.String `tfsdk:"cluster_id"`
-	DbCatalogID types.String `tfsdk:"database_catalog_id"`
-	Name        types.String `tfsdk:"name"`
-}
diff --git a/resources/dw/schema_hive_vw.go b/resources/dw/schema_hive_vw.go
deleted file mode 100644
index 838d3b7a..00000000
--- a/resources/dw/schema_hive_vw.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2024 Cloudera. All Rights Reserved.
-//
-// This file is licensed under the Apache License Version 2.0 (the "License").
-// You may not use this file except in compliance with the License.
-// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
-//
-// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-// OF ANY KIND, either express or implied. Refer to the License for the specific
-// permissions and limitations governing your use of the file.
-
-package dw
-
-import (
-	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
-	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
-	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
-)
-
-var hiveSchema = schema.Schema{
-	Attributes: map[string]schema.Attribute{
-		"id": schema.StringAttribute{
-			Computed: true,
-			PlanModifiers: []planmodifier.String{
-				stringplanmodifier.UseStateForUnknown(),
-			},
-		},
-		"cluster_id": schema.StringAttribute{
-			Required:            true,
-			MarkdownDescription: "The id of the CDW Cluster which the Hive Virtual Warehouse is attached to.",
-			PlanModifiers: []planmodifier.String{
-				stringplanmodifier.UseStateForUnknown(),
-			},
-		},
-		"database_catalog_id": schema.StringAttribute{
-			Required:            true,
-			MarkdownDescription: "The id of the Database Catalog which the Hive Virtual Warehouse is attached to.",
-			PlanModifiers: []planmodifier.String{
-				stringplanmodifier.UseStateForUnknown(),
-			},
-		},
-		"name": schema.StringAttribute{
-			Required:            true,
-			MarkdownDescription: "The name of the Hive Virtual Warehouse.",
-		},
-	},
-}
diff --git a/resources/dw/virtualwarehouse/hive/model_hive_vw.go b/resources/dw/virtualwarehouse/hive/model_hive_vw.go
new file mode 100644
index 00000000..67106d09
--- /dev/null
+++ b/resources/dw/virtualwarehouse/hive/model_hive_vw.go
@@ -0,0 +1,44 @@
+// Copyright 2024 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package hive
+
+import (
+	"time"
+
+	"github.com/hashicorp/terraform-plugin-framework/types"
+
+	"github.com/cloudera/terraform-provider-cdp/utils"
+)
+
+type resourceModel struct {
+	ID                types.String          `tfsdk:"id"`
+	ClusterID         types.String          `tfsdk:"cluster_id"`
+	DatabaseCatalogID types.String          `tfsdk:"database_catalog_id"`
+	Name              types.String          `tfsdk:"name"`
+	LastUpdated       types.String          `tfsdk:"last_updated"`
+	Status            types.String          `tfsdk:"status"`
+	PollingOptions    *utils.PollingOptions `tfsdk:"polling_options"`
+}
+
+// TODO these are the same everywhere, abstract this
+func (p *resourceModel) getPollingTimeout() time.Duration {
+	if p.PollingOptions != nil {
+		return time.Duration(p.PollingOptions.PollingTimeout.ValueInt64()) * time.Minute
+	}
+	return 40 * time.Minute
+}
+
+func (p *resourceModel) getCallFailureThreshold() int {
+	if p.PollingOptions != nil {
+		return int(p.PollingOptions.CallFailureThreshold.ValueInt64())
+	}
+	return 3
+}
diff --git a/resources/dw/resource_hive_vw.go b/resources/dw/virtualwarehouse/hive/resource_hive_vw.go
similarity index 52%
rename from resources/dw/resource_hive_vw.go
rename to resources/dw/virtualwarehouse/hive/resource_hive_vw.go
index 0d922527..92f2afc8 100644
--- a/resources/dw/resource_hive_vw.go
+++ b/resources/dw/virtualwarehouse/hive/resource_hive_vw.go
@@ -8,10 +8,14 @@
 // OF ANY KIND, either express or implied. Refer to the License for the specific
 // permissions and limitations governing your use of the file.
 
-package dw
+package hive
 
 import (
 	"context"
+	"fmt"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry"
+	"strings"
+	"time"
 
 	"github.com/hashicorp/terraform-plugin-framework/resource"
 	"github.com/hashicorp/terraform-plugin-framework/types"
@@ -50,7 +54,7 @@ func (r *hiveResource) Schema(_ context.Context, _ resource.SchemaRequest, resp
 
 func (r *hiveResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
 	// Retrieve values from plan
-	var plan hiveResourceModel
+	var plan resourceModel
 	diags := req.Plan.Get(ctx, &plan)
 	resp.Diagnostics.Append(diags...)
 	if resp.Diagnostics.HasError() {
@@ -62,7 +66,7 @@ func (r *hiveResource) Create(ctx context.Context, req resource.CreateRequest, r
 		WithInput(&models.CreateVwRequest{
 			Name:      plan.Name.ValueStringPointer(),
 			ClusterID: plan.ClusterID.ValueStringPointer(),
-			DbcID:     plan.DbCatalogID.ValueStringPointer(),
+			DbcID:     plan.DatabaseCatalogID.ValueStringPointer(),
 			VwType:    models.VwTypeHive.Pointer(),
 		})
 
@@ -77,8 +81,29 @@ func (r *hiveResource) Create(ctx context.Context, req resource.CreateRequest, r
 	}
 
 	payload := response.GetPayload()
+	clusterID := plan.ClusterID.ValueStringPointer()
+	vwID := &payload.VwID
+
+	if opts := plan.PollingOptions; !(opts != nil && opts.Async.ValueBool()) {
+		callFailedCount := 0
+		stateConf := &retry.StateChangeConf{
+			Pending:      []string{"Accepted", "Creating", "Created", "Starting"},
+			Target:       []string{"Running"},
+			Delay:        30 * time.Second,
+			Timeout:      plan.getPollingTimeout(),
+			PollInterval: 30 * time.Second,
+			Refresh:      r.stateRefresh(ctx, clusterID, vwID, &callFailedCount, plan.getCallFailureThreshold()),
+		}
+		if _, err = stateConf.WaitForStateContext(ctx); err != nil {
+			resp.Diagnostics.AddError(
+				"Error waiting for Data Warehouse hive virtual warehouse",
+				"Could not create hive, unexpected error: "+err.Error(),
+			)
+			return
+		}
+	}
 	desc := operations.NewDescribeVwParamsWithContext(ctx).
-		WithInput(&models.DescribeVwRequest{VwID: &payload.VwID, ClusterID: plan.ClusterID.ValueStringPointer()})
+		WithInput(&models.DescribeVwRequest{VwID: vwID, ClusterID: clusterID})
 	describe, err := r.client.Dw.Operations.DescribeVw(desc)
 	if err != nil {
 		resp.Diagnostics.AddError(
@@ -89,13 +114,11 @@ func (r *hiveResource) Create(ctx context.Context, req resource.CreateRequest, r
 	}
 
 	hive := describe.GetPayload()
-
-	// Map response body to schema and populate Computed attribute values
 	plan.ID = types.StringValue(hive.Vw.ID)
-	plan.DbCatalogID = types.StringValue(hive.Vw.DbcID)
+	plan.DatabaseCatalogID = types.StringValue(hive.Vw.DbcID)
 	plan.Name = types.StringValue(hive.Vw.Name)
-
-	// Set state to fully populated data
+	plan.Status = types.StringValue(hive.Vw.Status)
+	plan.LastUpdated = types.StringValue(time.Now().Format(time.RFC850))
 	diags = resp.State.Set(ctx, plan)
 	resp.Diagnostics.Append(diags...)
 }
@@ -109,26 +132,75 @@ func (r *hiveResource) Update(ctx context.Context, req resource.UpdateRequest, r
 }
 
 func (r *hiveResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
-	var state hiveResourceModel
+	var state resourceModel
 
-	// Read Terraform prior state into the model
 	resp.Diagnostics.Append(req.State.Get(ctx, &state)...)
-
 	if resp.Diagnostics.HasError() {
 		return
 	}
 
+	clusterID := state.ClusterID.ValueStringPointer()
+	vwID := state.ID.ValueStringPointer()
 	op := operations.NewDeleteVwParamsWithContext(ctx).
 		WithInput(&models.DeleteVwRequest{
-			ClusterID: state.ClusterID.ValueStringPointer(),
-			VwID:      state.ID.ValueStringPointer(),
+			ClusterID: clusterID,
+			VwID:      vwID,
 		})
 
 	if _, err := r.client.Dw.Operations.DeleteVw(op); err != nil {
+		if strings.Contains(err.Error(), "Virtual Warehouse not found") {
+			return
+		}
 		resp.Diagnostics.AddError(
 			"Error deleting Hive Virtual Warehouse",
 			"Could not delete Hive Virtual Warehouse, unexpected error: "+err.Error(),
 		)
 		return
 	}
+
+	if opts := state.PollingOptions; !(opts != nil && opts.Async.ValueBool()) {
+		callFailedCount := 0
+		stateConf := &retry.StateChangeConf{
+			Pending:      []string{"Deleting", "Running", "Stopping", "Stopped", "Creating", "Created", "Starting", "Updating"},
+			Target:       []string{"Deleted"}, // This is not an actual state, we added it to fake the state change
+			Delay:        30 * time.Second,
+			Timeout:      state.getPollingTimeout(),
+			PollInterval: 30 * time.Second,
+			Refresh:      r.stateRefresh(ctx, clusterID, vwID, &callFailedCount, state.getCallFailureThreshold()),
+		}
+		if _, err := stateConf.WaitForStateContext(ctx); err != nil {
+			resp.Diagnostics.AddError(
+				"Error waiting for Data Warehouse Hive Virtual Warehouse",
+				"Could not delete hive, unexpected error: "+err.Error(),
+			)
+			return
+		}
+	}
+}
+
+func (r *hiveResource) stateRefresh(ctx context.Context, clusterID *string, vwID *string, callFailedCount *int, callFailureThreshold int) func() (any, string, error) {
+	return func() (any, string, error) {
+		tflog.Debug(ctx, "About to describe hive")
+		params := operations.NewDescribeVwParamsWithContext(ctx).
+			WithInput(&models.DescribeVwRequest{ClusterID: clusterID, VwID: vwID})
+		resp, err := r.client.Dw.Operations.DescribeVw(params)
+		if err != nil {
+			if strings.Contains(err.Error(), "Virtual Warehouse not found") {
+				return &models.DescribeVwResponse{}, "Deleted", nil
+			}
+			*callFailedCount++
+			if *callFailedCount <= callFailureThreshold {
+				tflog.Warn(ctx, fmt.Sprintf("could not describe Data Warehouse Hive Virtual Warehouse "+
+					"due to [%s] but threshold limit is not reached yet (%d out of %d).", err.Error(), callFailedCount, callFailureThreshold))
+				return nil, "", nil
+			}
+			tflog.Error(ctx, fmt.Sprintf("error describing Data Warehouse Hive Virtual Warehouse due to [%s] "+
+				"failure threshold limit exceeded.", err.Error()))
+			return nil, "", err
+		}
+		*callFailedCount = 0
+		vw := resp.GetPayload()
+		tflog.Debug(ctx, fmt.Sprintf("Described Hive %s with status %s", vw.Vw.ID, vw.Vw.Status))
+		return vw, vw.Vw.Status, nil
+	}
 }
diff --git a/resources/dw/resource_hive_vw_acc_test.go b/resources/dw/virtualwarehouse/hive/resource_hive_vw_acc_test.go
similarity index 96%
rename from resources/dw/resource_hive_vw_acc_test.go
rename to resources/dw/virtualwarehouse/hive/resource_hive_vw_acc_test.go
index 3bcef275..3b99853d 100644
--- a/resources/dw/resource_hive_vw_acc_test.go
+++ b/resources/dw/virtualwarehouse/hive/resource_hive_vw_acc_test.go
@@ -8,7 +8,7 @@
 // OF ANY KIND, either express or implied. Refer to the License for the specific
 // permissions and limitations governing your use of the file.
 
-package dw_test
+package hive_test
 
 import (
 	"context"
@@ -67,8 +67,6 @@ func TestAccHive_basic(t *testing.T) {
 					resource.TestCheckResourceAttr("cdp_vw_hive.test_hive", "database_catalog_id", params.DatabaseCatalogID),
 				),
 			},
-			// TODO ImportState testing
-			// TODO Update and Read testing
 			// Delete testing automatically occurs in TestCase
 		},
 	})
@@ -100,7 +98,7 @@ func testCheckHiveDestroy(s *terraform.State) error {
 
 		_, err := cdpClient.Dw.Operations.DescribeVw(params)
 		if err != nil {
-			if strings.Contains(err.Error(), "404") {
+			if strings.Contains(err.Error(), "Virtual Warehouse not found") {
 				continue
 			}
 			return err
diff --git a/resources/dw/resource_hive_vw_test.go b/resources/dw/virtualwarehouse/hive/resource_hive_vw_test.go
similarity index 65%
rename from resources/dw/resource_hive_vw_test.go
rename to resources/dw/virtualwarehouse/hive/resource_hive_vw_test.go
index ebcf7416..09d9b788 100644
--- a/resources/dw/resource_hive_vw_test.go
+++ b/resources/dw/virtualwarehouse/hive/resource_hive_vw_test.go
@@ -8,11 +8,14 @@
 // OF ANY KIND, either express or implied. Refer to the License for the specific
 // permissions and limitations governing your use of the file.
 
-package dw
+package hive
 
 import (
 	"context"
 	"fmt"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
 	"testing"
 
 	"github.com/go-openapi/runtime"
@@ -58,6 +61,41 @@ var testHiveSchema = schema.Schema{
 			Required:            true,
 			MarkdownDescription: "The name of the Hive Virtual Warehouse.",
 		},
+		"last_updated": schema.StringAttribute{
+			Computed:            true,
+			MarkdownDescription: "Timestamp of the last Terraform update of the Hive Virtual Warehouse.",
+		},
+		"status": schema.StringAttribute{
+			Computed:            true,
+			MarkdownDescription: "The status of the Hive Virtual Warehouse.",
+		},
+		"polling_options": schema.SingleNestedAttribute{
+			MarkdownDescription: "Polling related configuration options that could specify various values that will be used during CDP resource creation.",
+			Optional:            true,
+			Attributes: map[string]schema.Attribute{
+				"async": schema.BoolAttribute{
+					MarkdownDescription: "Boolean value that specifies if Terraform should wait for resource creation/deletion.",
+					Optional:            true,
+					Computed:            true,
+					Default:             booldefault.StaticBool(false),
+					PlanModifiers: []planmodifier.Bool{
+						boolplanmodifier.UseStateForUnknown(),
+					},
+				},
+				"polling_timeout": schema.Int64Attribute{
+					MarkdownDescription: "Timeout value in minutes that specifies for how long should the polling go for resource creation/deletion.",
+					Default:             int64default.StaticInt64(40),
+					Computed:            true,
+					Optional:            true,
+				},
+				"call_failure_threshold": schema.Int64Attribute{
+					MarkdownDescription: "Threshold value that specifies how many times should a single call failure happen before giving up the polling.",
+					Default:             int64default.StaticInt64(3),
+					Computed:            true,
+					Optional:            true,
+				},
+			},
+		},
 	},
 }
 
@@ -82,12 +120,34 @@ func createRawHiveResource() tftypes.Value {
 		"cluster_id":          tftypes.String,
 		"database_catalog_id": tftypes.String,
 		"name":                tftypes.String,
+		"last_updated":        tftypes.String,
+		"status":              tftypes.String,
+		"polling_options": tftypes.Object{
+			AttributeTypes: map[string]tftypes.Type{
+				"async":                  tftypes.Bool,
+				"polling_timeout":        tftypes.Number,
+				"call_failure_threshold": tftypes.Number,
+			},
+		},
 	}}, map[string]tftypes.Value{
 		"id":                  tftypes.NewValue(tftypes.String, ""),
 		"cluster_id":          tftypes.NewValue(tftypes.String, "cluster-id"),
 		"database_catalog_id": tftypes.NewValue(tftypes.String, "database-catalog-id"),
 		"name":                tftypes.NewValue(tftypes.String, ""),
+		"last_updated":        tftypes.NewValue(tftypes.String, ""),
+		"status":              tftypes.NewValue(tftypes.String, "Running"),
+		"polling_options": tftypes.NewValue(
+			tftypes.Object{
+				AttributeTypes: map[string]tftypes.Type{
+					"async":                  tftypes.Bool,
+					"polling_timeout":        tftypes.Number,
+					"call_failure_threshold": tftypes.Number,
+				}}, map[string]tftypes.Value{
+				"async":                  tftypes.NewValue(tftypes.Bool, true),
+				"polling_timeout":        tftypes.NewValue(tftypes.Number, 90),
+				"call_failure_threshold": tftypes.NewValue(tftypes.Number, 3),
+			}),
 	})
 }
 
@@ -164,11 +224,11 @@ func (suite *HiveTestSuite) TestHiveCreate_Success() {
 	// Function under test
 	dwApi.Create(ctx, req, resp)
 
-	var result hiveResourceModel
+	var result resourceModel
 	resp.State.Get(ctx, &result)
 	suite.False(resp.Diagnostics.HasError())
 	suite.Equal("test-id", result.ID.ValueString())
-	suite.Equal("database-catalog-id", result.DbCatalogID.ValueString())
+	suite.Equal("database-catalog-id", result.DatabaseCatalogID.ValueString())
 	suite.Equal("cluster-id", result.ClusterID.ValueString())
 	suite.Equal("test-name", result.Name.ValueString())
 }
@@ -195,7 +255,7 @@ func (suite *HiveTestSuite) TestHiveCreate_CreationError() {
 	// Function under test
 	dwApi.Create(ctx, req, resp)
 
-	var result hiveResourceModel
+	var result resourceModel
 	resp.State.Get(ctx, &result)
 	suite.True(resp.Diagnostics.HasError())
 	suite.Contains(resp.Diagnostics.Errors()[0].Summary(), "Error creating hive virtual warehouse")
@@ -224,7 +284,7 @@ func (suite *HiveTestSuite) TestHiveCreate_DescribeError() {
 	// Function under test
 	dwApi.Create(ctx, req, resp)
 
-	var result hiveResourceModel
+	var result resourceModel
 	resp.State.Get(ctx, &result)
 	suite.True(resp.Diagnostics.HasError())
 	suite.Contains(resp.Diagnostics.Errors()[0].Summary(), "Error creating hive virtual warehouse")
@@ -270,3 +330,51 @@ func (suite *HiveTestSuite) TestHiveDeletion_ReturnsError() {
 	dwApi.Delete(ctx, req, resp)
 	suite.True(resp.Diagnostics.HasError())
 }
+
+func (suite *HiveTestSuite) TestStateRefresh_Success() {
+	ctx := context.TODO()
+	client := new(mocks.MockDwClientService)
+	client.On("DescribeVw", mock.Anything).Return(
+		&operations.DescribeVwOK{
+			Payload: &models.DescribeVwResponse{
+				Vw: &models.VwSummary{
+					ID:     "hive-id",
+					Status: "Running",
+				},
+			},
+		},
+		nil)
+	dwApi := NewDwApi(client)
+
+	clusterID := "cluster-id"
+	vwID := "hive-id"
+	callFailedCount := 0
+	callFailureThreshold := 3
+
+	// Function under test
+	refresh := dwApi.stateRefresh(ctx, &clusterID, &vwID, &callFailedCount, callFailureThreshold)
+	_, status, err := refresh()
+	suite.NoError(err)
+	suite.Equal("Running", status)
+}
+
+func (suite *HiveTestSuite) TestStateRefresh_FailureThresholdReached() {
+	ctx := context.TODO()
+	client := new(mocks.MockDwClientService)
+	client.On("DescribeVw", mock.Anything).Return(
+		&operations.DescribeVwOK{}, fmt.Errorf("unknown error"))
+	dwApi := NewDwApi(client)
+
+	clusterID := "cluster-id"
+	vwID := "hive-id"
+	callFailedCount := 0
+	callFailureThreshold := 3
+
+	// Function under test
+	refresh := dwApi.stateRefresh(ctx, &clusterID, &vwID, &callFailedCount, callFailureThreshold)
+	var err error
+	for i := 0; i <= callFailureThreshold; i++ {
+		_, _, err = refresh()
+	}
+	suite.Error(err, "unknown error")
+}
diff --git a/resources/dw/virtualwarehouse/hive/schema_hive_vw.go b/resources/dw/virtualwarehouse/hive/schema_hive_vw.go
new file mode 100644
index 00000000..db865a75
--- /dev/null
+++ b/resources/dw/virtualwarehouse/hive/schema_hive_vw.go
@@ -0,0 +1,84 @@
+// Copyright 2024 Cloudera. All Rights Reserved.
+//
+// This file is licensed under the Apache License Version 2.0 (the "License").
+// You may not use this file except in compliance with the License.
+// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
+//
+// This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+// OF ANY KIND, either express or implied. Refer to the License for the specific
+// permissions and limitations governing your use of the file.
+
+package hive
+
+import (
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64default"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+)
+
+var hiveSchema = schema.Schema{
+	Attributes: map[string]schema.Attribute{
+		"id": schema.StringAttribute{
+			Computed: true,
+			PlanModifiers: []planmodifier.String{
+				stringplanmodifier.UseStateForUnknown(),
+			},
+		},
+		"cluster_id": schema.StringAttribute{
+			Required:            true,
+			MarkdownDescription: "The id of the CDW Cluster which the Hive Virtual Warehouse is attached to.",
+			PlanModifiers: []planmodifier.String{
+				stringplanmodifier.UseStateForUnknown(),
+			},
+		},
+		"database_catalog_id": schema.StringAttribute{
+			Required:            true,
+			MarkdownDescription: "The id of the Database Catalog which the Hive Virtual Warehouse is attached to.",
+			PlanModifiers: []planmodifier.String{
+				stringplanmodifier.UseStateForUnknown(),
+			},
+		},
+		"name": schema.StringAttribute{
+			Required:            true,
+			MarkdownDescription: "The name of the Hive Virtual Warehouse.",
+		},
+		"last_updated": schema.StringAttribute{
+			Computed:            true,
+			MarkdownDescription: "Timestamp of the last Terraform update of the Hive Virtual Warehouse.",
+		},
+		"status": schema.StringAttribute{
+			Computed:            true,
+			MarkdownDescription: "The status of the Hive Virtual Warehouse.",
+		},
+		"polling_options": schema.SingleNestedAttribute{
+			MarkdownDescription: "Polling related configuration options that could specify various values that will be used during CDP resource creation.",
+			Optional:            true,
+			Attributes: map[string]schema.Attribute{
+				"async": schema.BoolAttribute{
+					MarkdownDescription: "Boolean value that specifies if Terraform should wait for resource creation/deletion.",
+					Optional:            true,
+					Computed:            true,
+					Default:             booldefault.StaticBool(false),
+					PlanModifiers: []planmodifier.Bool{
+						boolplanmodifier.UseStateForUnknown(),
+					},
+				},
+				"polling_timeout": schema.Int64Attribute{
+					MarkdownDescription: "Timeout value in minutes that specifies for how long should the polling go for resource creation/deletion.",
+					Default:             int64default.StaticInt64(40),
+					Computed:            true,
+					Optional:            true,
+				},
+				"call_failure_threshold": schema.Int64Attribute{
+					MarkdownDescription: "Threshold value that specifies how many times should a single call failure happen before giving up the polling.",
+					Default:             int64default.StaticInt64(3),
+					Computed:            true,
+					Optional:            true,
+				},
+			},
+		},
+	},
+}
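As a usage illustration only (not part of the patch), a minimal Terraform configuration exercising the relocated resource might look like the sketch below. The cdp_vw_hive resource type and attribute names come from the schema and acceptance test in this change; the cluster and database catalog IDs are placeholders, and the polling_options values simply restate the defaults introduced above.

    resource "cdp_vw_hive" "test_hive" {
      cluster_id          = "<cdw-cluster-id>"      # placeholder, ID of an existing CDW cluster
      database_catalog_id = "<database-catalog-id>" # placeholder, ID of an existing Database Catalog
      name                = "example-hive-vw"

      polling_options = {
        async                  = false # wait until the warehouse reaches the Running status
        polling_timeout        = 40    # minutes to keep polling during create/delete
        call_failure_threshold = 3     # tolerated consecutive describe failures
      }
    }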