From bc1bd9543bf6cbb8c865f0ec3bed5cd3d1db6c99 Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Wed, 13 Mar 2024 17:57:56 -0700 Subject: [PATCH 01/10] Created datasource for database. --- .../timestreamwrite/database_data_source.go | 84 ++++++++++++ .../database_data_source_test.go | 120 ++++++++++++++++++ .../timestreamwrite/service_package_gen.go | 7 +- .../d/timestreamwrite_database.html.markdown | 36 ++++++ 4 files changed, 246 insertions(+), 1 deletion(-) create mode 100644 internal/service/timestreamwrite/database_data_source.go create mode 100644 internal/service/timestreamwrite/database_data_source_test.go create mode 100644 website/docs/d/timestreamwrite_database.html.markdown diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go new file mode 100644 index 000000000000..84ae3ceca69a --- /dev/null +++ b/internal/service/timestreamwrite/database_data_source.go @@ -0,0 +1,84 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package timestreamwrite + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-provider-aws/internal/create" + "github.com/hashicorp/terraform-provider-aws/internal/framework" + "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + "github.com/hashicorp/terraform-provider-aws/names" +) + +// Function annotations are used for datasource registration to the Provider. DO NOT EDIT. 
+// @FrameworkDataSource(name="Database") +func newDataSourceDatabase(context.Context) (datasource.DataSourceWithConfigure, error) { + return &dataSourceDatabase{}, nil +} + +const ( + DSNameDatabase = "Database Data Source" +) + +type dataSourceDatabase struct { + framework.DataSourceWithConfigure +} + +func (d *dataSourceDatabase) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { // nosemgrep:ci.meta-in-func-name + resp.TypeName = "aws_timestreamwrite_database" +} + +func (d *dataSourceDatabase) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + "arn": framework.ARNAttributeComputedOnly(), + "database_name": schema.StringAttribute{ + Required: true, + }, + "kms_key_id": schema.StringAttribute{ + Computed: true, + }, + "table_count": schema.Int64Attribute{ + Computed: true, + }, + }, + } +} + +func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + conn := d.Meta().TimestreamWriteClient(ctx) + + var data dataSourceDatabaseData + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + out, err := findDatabaseByName(ctx, conn, data.DatabaseName.ValueString()) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.DatabaseName.String(), err), + err.Error(), + ) + return + } + + data.ARN = flex.StringToFramework(ctx, out.Arn) + data.DatabaseName = flex.StringToFramework(ctx, out.DatabaseName) + data.KmsKeyId = flex.StringToFramework(ctx, out.KmsKeyId) + data.TableCount = flex.Int64ToFramework(ctx, &out.TableCount) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} + +type dataSourceDatabaseData struct { + ARN types.String `tfsdk:"arn"` + DatabaseName types.String `tfsdk:"database_name"` + KmsKeyId types.String `tfsdk:"kms_key_id"` + TableCount types.Int64 `tfsdk:"table_count"` +} diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go new file mode 100644 index 000000000000..5498f0bebcef --- /dev/null +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -0,0 +1,120 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package timestreamwrite_test + +import ( + "fmt" + "testing" + + "github.com/YakDriver/regexache" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-aws/internal/acctest" + + "github.com/hashicorp/terraform-provider-aws/names" +) + +func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { + ctx := acctest.Context(t) + + if testing.Short() { + t.Skip("skipping long-running test in short mode") + } + + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_timestreamwrite_database.test" + resourceName := "aws_timestreamwrite_database.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + Config: testAccDatabaseDataSourceConfig_basic(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(dataSourceName, "arn", resourceName, "arn"), + resource.TestCheckResourceAttr(dataSourceName, "database_name", rName), + acctest.MatchResourceAttrRegionalARN(resourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), + 
resource.TestCheckResourceAttr(dataSourceName, "table_count", "0"), + ), + }, + }, + }) +} + +func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { + ctx := acctest.Context(t) + resourceName := "aws_timestreamwrite_database.test" + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_timestreamwrite_database.test" + kmsResourceName := "aws_kms_key.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckDatabaseDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccDatabaseDataSourceConfig_kmsKey(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "database_name", rName), + resource.TestCheckResourceAttrPair(dataSourceName, "kms_key_id", kmsResourceName, "arn"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDatabaseDataSourceConfig_basic(rName string) string { + return fmt.Sprintf(` +resource "aws_timestreamwrite_database" "test" { + database_name = %[1]q +} + +data "aws_timestreamwrite_database" "test" { + database_name = aws_timestreamwrite_database.test.database_name +} +`, rName) +} + +func testAccDatabaseDataSourceConfig_kmsKey(rName string) string { + return fmt.Sprintf(` +resource "aws_kms_key" "test" { + description = %[1]q + + policy = < Date: Thu, 14 Mar 2024 07:46:34 -0700 Subject: [PATCH 02/10] Corrected linting for package imports ["Std" "Third party" "Third party"] --- internal/service/timestreamwrite/database_data_source_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index 5498f0bebcef..0f00c9663c6a 100644 
--- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -11,7 +11,6 @@ import ( sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-aws/internal/acctest" - "github.com/hashicorp/terraform-provider-aws/names" ) From 671d23b4fe309629b634f15d17383db92ca20c8e Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Thu, 14 Mar 2024 07:53:10 -0700 Subject: [PATCH 03/10] Corrected Markdown format linter and terraformfmt linter --- internal/service/timestreamwrite/database_data_source_test.go | 2 +- website/docs/d/timestreamwrite_database.html.markdown | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index 0f00c9663c6a..c9fbb31e2c85 100644 --- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -113,7 +113,7 @@ resource "aws_timestreamwrite_database" "test" { } data "aws_timestreamwrite_database" "test" { - database_name = aws_timestreamwrite_database.test.database_name + database_name = aws_timestreamwrite_database.test.database_name } `, rName) } diff --git a/website/docs/d/timestreamwrite_database.html.markdown b/website/docs/d/timestreamwrite_database.html.markdown index 0983bc6bae86..d2e15be51c84 100644 --- a/website/docs/d/timestreamwrite_database.html.markdown +++ b/website/docs/d/timestreamwrite_database.html.markdown @@ -33,4 +33,4 @@ This data source exports the following attributes in addition to the arguments a * `id` - The name of the Timestream database. * `arn` - The ARN that uniquely identifies this database. * `kms_key_id` - The ARN of the KMS key used to encrypt the data stored in the database. 
-* `table_count` - The total number of tables found within the Timestream database. \ No newline at end of file +* `table_count` - The total number of tables found within the Timestream database. From 632de649d18739368738acccfe6d2726bcca49e4 Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Thu, 14 Mar 2024 07:57:07 -0700 Subject: [PATCH 04/10] Added .changelog entry. --- .changelog/36368.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .changelog/36368.txt diff --git a/.changelog/36368.txt b/.changelog/36368.txt new file mode 100644 index 000000000000..83ea46b996ba --- /dev/null +++ b/.changelog/36368.txt @@ -0,0 +1,3 @@ +```release-note:new-data-source +aws_timestreamwrite_database +``` \ No newline at end of file From 13f2930f41197c1f7a86c3b8e50b6d6d31de96a5 Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Thu, 14 Mar 2024 08:22:22 -0700 Subject: [PATCH 05/10] Added Tags to Data Source --- .../timestreamwrite/database_data_source.go | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go index 84ae3ceca69a..2475d43c403a 100644 --- a/internal/service/timestreamwrite/database_data_source.go +++ b/internal/service/timestreamwrite/database_data_source.go @@ -6,12 +6,16 @@ package timestreamwrite import ( "context" + // "github.com/aws/aws-sdk-go-v2/aws" "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-provider-aws/internal/create" + // "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" "github.com/hashicorp/terraform-provider-aws/internal/framework" "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + // "github.com/hashicorp/terraform-provider-aws/internal/tags" + // tftags 
"github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -72,7 +76,19 @@ func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadReques data.DatabaseName = flex.StringToFramework(ctx, out.DatabaseName) data.KmsKeyId = flex.StringToFramework(ctx, out.KmsKeyId) data.TableCount = flex.Int64ToFramework(ctx, &out.TableCount) + + tags, err := listTags(ctx, conn, *out.DatabaseName) + if err != nil { + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.DatabaseName.String(), err), + err.Error(), + ) + return + } + ignoreTagsConfig := d.Meta().IgnoreTagsConfig + data.Tags = flex.FlattenFrameworkStringValueMapLegacy(ctx, tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) } @@ -81,4 +97,5 @@ type dataSourceDatabaseData struct { DatabaseName types.String `tfsdk:"database_name"` KmsKeyId types.String `tfsdk:"kms_key_id"` TableCount types.Int64 `tfsdk:"table_count"` + Tags types.Map `tfsdk:"tags"` } From b9fc9c28c9e73c5e993e0146577c3452252b544f Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Thu, 14 Mar 2024 09:07:55 -0700 Subject: [PATCH 06/10] Adding tags --- .../timestreamwrite/database_data_source.go | 9 +- .../database_data_source_test.go | 91 ++++++++++++++++++- 2 files changed, 93 insertions(+), 7 deletions(-) diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go index 2475d43c403a..54ade304e544 100644 --- a/internal/service/timestreamwrite/database_data_source.go +++ b/internal/service/timestreamwrite/database_data_source.go @@ -15,7 +15,7 @@ import ( "github.com/hashicorp/terraform-provider-aws/internal/framework" "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" // "github.com/hashicorp/terraform-provider-aws/internal/tags" - // tftags 
"github.com/hashicorp/terraform-provider-aws/internal/tags" + tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/names" ) @@ -50,6 +50,7 @@ func (d *dataSourceDatabase) Schema(ctx context.Context, req datasource.SchemaRe "table_count": schema.Int64Attribute{ Computed: true, }, + "tags": tftags.TagsSchemaComputed(), }, } } @@ -76,7 +77,7 @@ func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadReques data.DatabaseName = flex.StringToFramework(ctx, out.DatabaseName) data.KmsKeyId = flex.StringToFramework(ctx, out.KmsKeyId) data.TableCount = flex.Int64ToFramework(ctx, &out.TableCount) - + tags, err := listTags(ctx, conn, *out.DatabaseName) if err != nil { resp.Diagnostics.AddError( @@ -88,7 +89,7 @@ func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadReques ignoreTagsConfig := d.Meta().IgnoreTagsConfig data.Tags = flex.FlattenFrameworkStringValueMapLegacy(ctx, tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()) - + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
} @@ -97,5 +98,5 @@ type dataSourceDatabaseData struct { DatabaseName types.String `tfsdk:"database_name"` KmsKeyId types.String `tfsdk:"kms_key_id"` TableCount types.Int64 `tfsdk:"table_count"` - Tags types.Map `tfsdk:"tags"` + Tags types.Map `tfsdk:"tags"` } diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index c9fbb31e2c85..4d9e2107b9f7 100644 --- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -21,9 +21,9 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { t.Skip("skipping long-running test in short mode") } - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) dataSourceName := "data.aws_timestreamwrite_database.test" resourceName := "aws_timestreamwrite_database.test" + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, @@ -46,10 +46,11 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { ctx := acctest.Context(t) - resourceName := "aws_timestreamwrite_database.test" - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_timestreamwrite_database.test" kmsResourceName := "aws_kms_key.test" + resourceName := "aws_timestreamwrite_database.test" + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, @@ -73,6 +74,57 @@ func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { }) } +func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { + ctx := acctest.Context(t) + + dataSourceName := "data.aws_timestreamwrite_database.test" + resourceName := "aws_timestreamwrite_database.test" + rName := 
sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckDatabaseDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccDatabaseConfig_tags1(rName, "key1", "value1"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.key1", "value1"), + ), + }, + { + Config: testAccDatabaseConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "tags.%", "2"), + resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1updated"), + resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "2"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.key1", "value1updated"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.key2", "value2"), + ), + }, + { + Config: testAccDatabaseConfig_tags1(rName, "key2", "value2"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), + resource.TestCheckResourceAttr(dataSourceName, "tags_all.key2", "value2"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccDatabaseDataSourceConfig_basic(rName string) string { return fmt.Sprintf(` resource 
"aws_timestreamwrite_database" "test" { @@ -117,3 +169,36 @@ data "aws_timestreamwrite_database" "test" { } `, rName) } + +func testAccDatabaseDataSourceConfig_tags1(rName, tagKey1, tagValue1 string) string { + return fmt.Sprintf(` +resource "aws_timestreamwrite_database" "test" { + database_name = %[1]q + + tags = { + %[2]q = %[3]q + } +} + +data "aws_timestreamwrite_database" "test" { + database_name = aws_timestreamwrite_database.test.database_name +} +`, rName, tagKey1, tagValue1) +} + +func testAccDatabaseDataSourceConfig_tags2(rName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { + return fmt.Sprintf(` +resource "aws_timestreamwrite_database" "test" { + database_name = %[1]q + + tags = { + %[2]q = %[3]q + %[4]q = %[5]q + } +} + +data "aws_timestreamwrite_database" "test" { + database_name = aws_timestreamwrite_database.test.database_name +} +`, rName, tagKey1, tagValue1, tagKey2, tagValue2) +} From 3404d0d4ca7f497bacba6a4bc9768672c6fa0aa7 Mon Sep 17 00:00:00 2001 From: Hunter Tom Date: Tue, 26 Mar 2024 17:21:11 -0700 Subject: [PATCH 07/10] Changed from TF Plugin Framework to SDKv2 for consistency sake across datasources and resources. 
--- .../timestreamwrite/database_data_source.go | 115 ++++++++--------- .../database_data_source_test.go | 117 +++++++++++------- 2 files changed, 123 insertions(+), 109 deletions(-) diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go index 54ade304e544..6e5607dd2b2a 100644 --- a/internal/service/timestreamwrite/database_data_source.go +++ b/internal/service/timestreamwrite/database_data_source.go @@ -6,97 +6,80 @@ package timestreamwrite import ( "context" - // "github.com/aws/aws-sdk-go-v2/aws" - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-aws/internal/conns" "github.com/hashicorp/terraform-provider-aws/internal/create" - // "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" - "github.com/hashicorp/terraform-provider-aws/internal/framework" - "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" - // "github.com/hashicorp/terraform-provider-aws/internal/tags" tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" "github.com/hashicorp/terraform-provider-aws/names" ) // Function annotations are used for datasource registration to the Provider. DO NOT EDIT. 
-// @FrameworkDataSource(name="Database") -func newDataSourceDatabase(context.Context) (datasource.DataSourceWithConfigure, error) { - return &dataSourceDatabase{}, nil -} - -const ( - DSNameDatabase = "Database Data Source" -) - -type dataSourceDatabase struct { - framework.DataSourceWithConfigure -} - -func (d *dataSourceDatabase) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { // nosemgrep:ci.meta-in-func-name - resp.TypeName = "aws_timestreamwrite_database" -} - -func (d *dataSourceDatabase) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = schema.Schema{ - Attributes: map[string]schema.Attribute{ - "arn": framework.ARNAttributeComputedOnly(), - "database_name": schema.StringAttribute{ +// @SDKDataSource("aws_timestreamwrite_database", name="Database") +func DataSourceDatabase() *schema.Resource { + return &schema.Resource{ + ReadWithoutTimeout: dataSourceDatabaseRead, + + Schema: map[string]*schema.Schema{ + "arn": { + Type: schema.TypeString, + Computed: true, + }, + "database_name": { + Type: schema.TypeString, Required: true, }, - "kms_key_id": schema.StringAttribute{ + "kms_key_id": { + Type: schema.TypeString, Computed: true, }, - "table_count": schema.Int64Attribute{ + "table_count": { + Type: schema.TypeInt, Computed: true, }, - "tags": tftags.TagsSchemaComputed(), + names.AttrTags: tftags.TagsSchema(), + names.AttrTagsAll: tftags.TagsSchemaComputed(), }, } } -func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - conn := d.Meta().TimestreamWriteClient(ctx) +const ( + DSNameDatabase = "Database Data Source" +) + +func dataSourceDatabaseRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics - var data dataSourceDatabaseData - resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
- if resp.Diagnostics.HasError() { - return - } + conn := meta.(*conns.AWSClient).TimestreamWriteClient(ctx) + ignoreTagsConfig := meta.(*conns.AWSClient).IgnoreTagsConfig - out, err := findDatabaseByName(ctx, conn, data.DatabaseName.ValueString()) + name := d.Get("database_name").(string) + + out, err := findDatabaseByName(ctx, conn, name) if err != nil { - resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.DatabaseName.String(), err), - err.Error(), - ) - return + return create.AppendDiagError(diags, names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, name, err) } - data.ARN = flex.StringToFramework(ctx, out.Arn) - data.DatabaseName = flex.StringToFramework(ctx, out.DatabaseName) - data.KmsKeyId = flex.StringToFramework(ctx, out.KmsKeyId) - data.TableCount = flex.Int64ToFramework(ctx, &out.TableCount) + d.SetId(aws.ToString(out.DatabaseName)) + + d.Set("arn", out.Arn) + d.Set("database_name", out.DatabaseName) + d.Set("kms_key_id", out.KmsKeyId) + d.Set("table_count", out.TableCount) - tags, err := listTags(ctx, conn, *out.DatabaseName) + tags, err := listTags(ctx, conn, d.Get("arn").(string)) if err != nil { - resp.Diagnostics.AddError( - create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.DatabaseName.String(), err), - err.Error(), - ) - return + return diag.Errorf("listing tags for timestream table (%s): %s", d.Id(), err) } - ignoreTagsConfig := d.Meta().IgnoreTagsConfig - data.Tags = flex.FlattenFrameworkStringValueMapLegacy(ctx, tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()) + if err := d.Set("tags", tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()); err != nil { + return diag.Errorf("setting tags: %s", err) + } - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
-} + if err := d.Set("tags_all", tags.Map()); err != nil { + return diag.Errorf("setting tags_all: %s", err) + } -type dataSourceDatabaseData struct { - ARN types.String `tfsdk:"arn"` - DatabaseName types.String `tfsdk:"database_name"` - KmsKeyId types.String `tfsdk:"kms_key_id"` - TableCount types.Int64 `tfsdk:"table_count"` - Tags types.Map `tfsdk:"tags"` + return diags } diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index 4d9e2107b9f7..df4b587426a5 100644 --- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -11,6 +11,7 @@ import ( sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/names" ) @@ -21,23 +22,24 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { t.Skip("skipping long-running test in short mode") } + // var database types.Database + rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) dataSourceName := "data.aws_timestreamwrite_database.test" - resourceName := "aws_timestreamwrite_database.test" - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t) }, + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: nil, + CheckDestroy: testAccCheckDatabaseDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccDatabaseDataSourceConfig_basic(rName), + Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttrPair(dataSourceName, "arn", resourceName, "arn"), - resource.TestCheckResourceAttr(dataSourceName, "database_name", rName), - acctest.MatchResourceAttrRegionalARN(resourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), - resource.TestCheckResourceAttr(dataSourceName, "table_count", "0"), + testAccCheckDatabaseExists(ctx, dataSourceName), + acctest.CheckResourceAttrRegionalARN(dataSourceName, "arn", "timestream", fmt.Sprintf("database/%s", rDatabaseName)), + resource.TestCheckResourceAttr(dataSourceName, "database_name", rDatabaseName), + acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), + resource.TestCheckResourceAttr(dataSourceName, "tags.%", "0"), ), }, }, @@ -46,11 +48,35 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { ctx := acctest.Context(t) + rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_timestreamwrite_database.test" + kmsResourceName := "aws_kms_key.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckDatabaseDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), + resource.TestCheckResourceAttr(dataSourceName, "database_name", rDatabaseName), + resource.TestCheckResourceAttrPair(dataSourceName, "kms_key_id", kmsResourceName, "arn"), + ), + }, + }, + }) +} + +func TestAccTimestreamWriteDatabaseDataSource_updateKMSKey(t *testing.T) { + ctx := acctest.Context(t) 
+ rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) dataSourceName := "data.aws_timestreamwrite_database.test" kmsResourceName := "aws_kms_key.test" - resourceName := "aws_timestreamwrite_database.test" - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, @@ -59,16 +85,25 @@ func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { CheckDestroy: testAccCheckDatabaseDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccDatabaseDataSourceConfig_kmsKey(rName), + Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(dataSourceName, "database_name", rName), + testAccCheckDatabaseExists(ctx, dataSourceName), + acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), + ), + }, + { + Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), resource.TestCheckResourceAttrPair(dataSourceName, "kms_key_id", kmsResourceName, "arn"), ), }, { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, + Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), + Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), + acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), + ), }, }, }) @@ -76,10 +111,8 @@ func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { ctx := acctest.Context(t) - + rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) dataSourceName := "data.aws_timestreamwrite_database.test" - resourceName 
:= "aws_timestreamwrite_database.test" - rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, @@ -88,8 +121,9 @@ func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { CheckDestroy: testAccCheckDatabaseDestroy(ctx), Steps: []resource.TestStep{ { - Config: testAccDatabaseConfig_tags1(rName, "key1", "value1"), + Config: testAccDatabaseDataSourceConfig_tags1(rDatabaseName, "key1", "value1"), Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1"), resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), @@ -97,8 +131,9 @@ func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { ), }, { - Config: testAccDatabaseConfig_tags2(rName, "key1", "value1updated", "key2", "value2"), + Config: testAccDatabaseDataSourceConfig_tags2(rDatabaseName, "key1", "value1updated", "key2", "value2"), Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), resource.TestCheckResourceAttr(dataSourceName, "tags.%", "2"), resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1updated"), resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), @@ -108,36 +143,32 @@ func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { ), }, { - Config: testAccDatabaseConfig_tags1(rName, "key2", "value2"), + Config: testAccDatabaseDataSourceConfig_tags1(rDatabaseName, "key2", "value2"), Check: resource.ComposeTestCheckFunc( + testAccCheckDatabaseExists(ctx, dataSourceName), resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), resource.TestCheckResourceAttr(dataSourceName, 
"tags_all.key2", "value2"), ), }, - { - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, }, }) } -func testAccDatabaseDataSourceConfig_basic(rName string) string { +func testAccDatabaseDataSourceConfig_basic(rDatabaseName string) string { return fmt.Sprintf(` resource "aws_timestreamwrite_database" "test" { - database_name = %[1]q -} + database_name = %[1]q + } data "aws_timestreamwrite_database" "test" { database_name = aws_timestreamwrite_database.test.database_name -} -`, rName) + } +`, rDatabaseName) } -func testAccDatabaseDataSourceConfig_kmsKey(rName string) string { +func testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName string) string { return fmt.Sprintf(` resource "aws_kms_key" "test" { description = %[1]q @@ -160,17 +191,17 @@ POLICY } resource "aws_timestreamwrite_database" "test" { - database_name = %[1]q + database_name = %[2]q kms_key_id = aws_kms_key.test.arn } data "aws_timestreamwrite_database" "test" { - database_name = aws_timestreamwrite_database.test.database_name -} -`, rName) + database_name = aws_timestreamwrite_database.test.database_name + } +`, rKmsKeyName, rDatabaseName) } -func testAccDatabaseDataSourceConfig_tags1(rName, tagKey1, tagValue1 string) string { +func testAccDatabaseDataSourceConfig_tags1(rDatabaseName, tagKey1, tagValue1 string) string { return fmt.Sprintf(` resource "aws_timestreamwrite_database" "test" { database_name = %[1]q @@ -182,11 +213,11 @@ resource "aws_timestreamwrite_database" "test" { data "aws_timestreamwrite_database" "test" { database_name = aws_timestreamwrite_database.test.database_name -} -`, rName, tagKey1, tagValue1) + } +`, rDatabaseName, tagKey1, tagValue1) } -func testAccDatabaseDataSourceConfig_tags2(rName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { +func testAccDatabaseDataSourceConfig_tags2(rDatabaseName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { return fmt.Sprintf(` resource "aws_timestreamwrite_database" "test" { 
database_name = %[1]q @@ -199,6 +230,6 @@ resource "aws_timestreamwrite_database" "test" { data "aws_timestreamwrite_database" "test" { database_name = aws_timestreamwrite_database.test.database_name -} -`, rName, tagKey1, tagValue1, tagKey2, tagValue2) + } +`, rDatabaseName, tagKey1, tagValue1, tagKey2, tagValue2) } From 0dacbe01e1394e2a140fc67dcd1bcb377ed3c8b1 Mon Sep 17 00:00:00 2001 From: ThomasZalewski Date: Mon, 15 Jul 2024 11:43:29 -0400 Subject: [PATCH 08/10] Updated datasource to framework. --- .../timestreamwrite/database_data_source.go | 124 +++++++++-------- .../database_data_source_test.go | 125 ++++-------------- .../d/timestreamwrite_database.html.markdown | 12 +- 3 files changed, 103 insertions(+), 158 deletions(-) diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go index 6e5607dd2b2a..5e99a94a535f 100644 --- a/internal/service/timestreamwrite/database_data_source.go +++ b/internal/service/timestreamwrite/database_data_source.go @@ -1,85 +1,105 @@ +package timestreamwrite + // Copyright (c) HashiCorp, Inc. 
// SPDX-License-Identifier: MPL-2.0 -package timestreamwrite - import ( "context" - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/aws/aws-sdk-go-v2/service/timestreamwrite" + "github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-provider-aws/internal/create" - tftags "github.com/hashicorp/terraform-provider-aws/internal/tags" + "github.com/hashicorp/terraform-provider-aws/internal/framework" + "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" "github.com/hashicorp/terraform-provider-aws/names" ) -// Function annotations are used for datasource registration to the Provider. DO NOT EDIT. 
-// @SDKDataSource("aws_timestreamwrite_database", name="Database") -func DataSourceDatabase() *schema.Resource { - return &schema.Resource{ - ReadWithoutTimeout: dataSourceDatabaseRead, +// @FrameworkDataSource(name="Database") +func newDataSourceDatabase(context.Context) (datasource.DataSourceWithConfigure, error) { + return &dataSourceDatabase{}, nil +} + +type dataSourceDatabase struct { + framework.DataSourceWithConfigure +} - Schema: map[string]*schema.Schema{ - "arn": { - Type: schema.TypeString, +const ( + DSNameDatabase = "Database data source" +) + +func (d *dataSourceDatabase) Metadata(_ context.Context, request datasource.MetadataRequest, response *datasource.MetadataResponse) { + response.TypeName = "aws_timestreamwrite_database" +} + +func (d *dataSourceDatabase) Schema(ctx context.Context, request datasource.SchemaRequest, response *datasource.SchemaResponse) { + response.Schema = schema.Schema{ + Attributes: map[string]schema.Attribute{ + names.AttrARN: schema.StringAttribute{ Computed: true, + Validators: []validator.String{ + stringvalidator.LengthBetween(3, 256), + }, }, - "database_name": { - Type: schema.TypeString, + names.AttrName: schema.StringAttribute{ Required: true, }, - "kms_key_id": { - Type: schema.TypeString, + names.AttrCreatedTime: schema.StringAttribute{ + CustomType: timetypes.RFC3339Type{}, + Computed: true, + }, + names.AttrKMSKeyID: schema.StringAttribute{ Computed: true, }, - "table_count": { - Type: schema.TypeInt, + "table_count": schema.Int64Attribute{ Computed: true, }, - names.AttrTags: tftags.TagsSchema(), - names.AttrTagsAll: tftags.TagsSchemaComputed(), + names.AttrLastUpdatedTime: schema.StringAttribute{ + CustomType: timetypes.RFC3339Type{}, + Computed: true, + }, }, } } -const ( - DSNameDatabase = "Database Data Source" -) - -func dataSourceDatabaseRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - var diags diag.Diagnostics - - conn := 
meta.(*conns.AWSClient).TimestreamWriteClient(ctx) - ignoreTagsConfig := meta.(*conns.AWSClient).IgnoreTagsConfig - - name := d.Get("database_name").(string) +func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + conn := d.Meta().TimestreamWriteClient(ctx) + var data dsDescribeDatabase - out, err := findDatabaseByName(ctx, conn, name) - if err != nil { - return create.AppendDiagError(diags, names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, name, err) + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + in := &timestreamwrite.DescribeDatabaseInput{ + DatabaseName: data.Name.ValueStringPointer(), } - d.SetId(aws.ToString(out.DatabaseName)) - - d.Set("arn", out.Arn) - d.Set("database_name", out.DatabaseName) - d.Set("kms_key_id", out.KmsKeyId) - d.Set("table_count", out.TableCount) + desc, err := conn.DescribeDatabase(ctx, in) - tags, err := listTags(ctx, conn, d.Get("arn").(string)) if err != nil { - return diag.Errorf("listing tags for timestream table (%s): %s", d.Id(), err) + resp.Diagnostics.AddError( + create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.Name.String(), err), + err.Error(), + ) + return } - if err := d.Set("tags", tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()); err != nil { - return diag.Errorf("setting tags: %s", err) - } - - if err := d.Set("tags_all", tags.Map()); err != nil { - return diag.Errorf("setting tags_all: %s", err) + resp.Diagnostics.Append(flex.Flatten(ctx, desc.Database, &data)...) + if resp.Diagnostics.HasError() { + return } + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+} - return diags +type dsDescribeDatabase struct { + ARN types.String `tfsdk:"arn"` + CreatedTime timetypes.RFC3339 `tfsdk:"created_time"` + Name types.String `tfsdk:"name"` + KmsKeyID types.String `tfsdk:"kms_key_id"` + LastUpdatedTime timetypes.RFC3339 `tfsdk:"last_updated_time"` + TableCount types.Int64 `tfsdk:"table_count"` } diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index df4b587426a5..812ba27cf5b9 100644 --- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -22,8 +22,8 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { t.Skip("skipping long-running test in short mode") } - // var database types.Database rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_timestreamwrite_database.test" dataSourceName := "data.aws_timestreamwrite_database.test" resource.ParallelTest(t, resource.TestCase{ @@ -35,11 +35,12 @@ func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) { { Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - acctest.CheckResourceAttrRegionalARN(dataSourceName, "arn", "timestream", fmt.Sprintf("database/%s", rDatabaseName)), - resource.TestCheckResourceAttr(dataSourceName, "database_name", rDatabaseName), - acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), - resource.TestCheckResourceAttr(dataSourceName, "tags.%", "0"), + testAccCheckDatabaseExists(ctx, resourceName), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrARN, resourceName, names.AttrARN), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrName, resourceName, names.AttrDatabaseName), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrCreatedTime, 
resourceName, names.AttrCreatedTime), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, resourceName, names.AttrKMSKeyID), + resource.TestCheckResourceAttrPair(dataSourceName, "table_count", resourceName, "table_count"), ), }, }, @@ -52,6 +53,7 @@ func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) dataSourceName := "data.aws_timestreamwrite_database.test" kmsResourceName := "aws_kms_key.test" + resourceName := "aws_timestreamwrite_database.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, @@ -62,9 +64,9 @@ func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) { { Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName), Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - resource.TestCheckResourceAttr(dataSourceName, "database_name", rDatabaseName), - resource.TestCheckResourceAttrPair(dataSourceName, "kms_key_id", kmsResourceName, "arn"), + testAccCheckDatabaseExists(ctx, resourceName), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrName, resourceName, names.AttrDatabaseName), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, kmsResourceName, names.AttrARN), ), }, }, @@ -75,6 +77,7 @@ func TestAccTimestreamWriteDatabaseDataSource_updateKMSKey(t *testing.T) { ctx := acctest.Context(t) rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_timestreamwrite_database.test" dataSourceName := "data.aws_timestreamwrite_database.test" kmsResourceName := "aws_kms_key.test" @@ -87,69 +90,22 @@ func TestAccTimestreamWriteDatabaseDataSource_updateKMSKey(t *testing.T) { { Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), Check: resource.ComposeTestCheckFunc( - 
testAccCheckDatabaseExists(ctx, dataSourceName), - acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), + testAccCheckDatabaseExists(ctx, resourceName), + acctest.MatchResourceAttrRegionalARN(dataSourceName, names.AttrKMSKeyID, "kms", regexache.MustCompile(`key/.+`)), ), }, { Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName), Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - resource.TestCheckResourceAttrPair(dataSourceName, "kms_key_id", kmsResourceName, "arn"), + testAccCheckDatabaseExists(ctx, resourceName), + resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, kmsResourceName, names.AttrARN), ), }, { Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName), Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - acctest.MatchResourceAttrRegionalARN(dataSourceName, "kms_key_id", "kms", regexache.MustCompile(`key/.+`)), - ), - }, - }, - }) -} - -func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { - ctx := acctest.Context(t) - rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) - dataSourceName := "data.aws_timestreamwrite_database.test" - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, names.TimestreamWriteServiceID), - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, - CheckDestroy: testAccCheckDatabaseDestroy(ctx), - Steps: []resource.TestStep{ - { - Config: testAccDatabaseDataSourceConfig_tags1(rDatabaseName, "key1", "value1"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), - 
resource.TestCheckResourceAttr(dataSourceName, "tags_all.key1", "value1"), - ), - }, - { - Config: testAccDatabaseDataSourceConfig_tags2(rDatabaseName, "key1", "value1updated", "key2", "value2"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - resource.TestCheckResourceAttr(dataSourceName, "tags.%", "2"), - resource.TestCheckResourceAttr(dataSourceName, "tags.key1", "value1updated"), - resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "2"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.key1", "value1updated"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.key2", "value2"), - ), - }, - { - Config: testAccDatabaseDataSourceConfig_tags1(rDatabaseName, "key2", "value2"), - Check: resource.ComposeTestCheckFunc( - testAccCheckDatabaseExists(ctx, dataSourceName), - resource.TestCheckResourceAttr(dataSourceName, "tags.%", "1"), - resource.TestCheckResourceAttr(dataSourceName, "tags.key2", "value2"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.%", "1"), - resource.TestCheckResourceAttr(dataSourceName, "tags_all.key2", "value2"), + testAccCheckDatabaseExists(ctx, resourceName), + acctest.MatchResourceAttrRegionalARN(dataSourceName, names.AttrKMSKeyID, "kms", regexache.MustCompile(`key/.+`)), ), }, }, @@ -159,12 +115,12 @@ func TestAccTimestreamWriteDatabaseDataSource_tags(t *testing.T) { func testAccDatabaseDataSourceConfig_basic(rDatabaseName string) string { return fmt.Sprintf(` resource "aws_timestreamwrite_database" "test" { - database_name = %[1]q - } + database_name = %[1]q +} data "aws_timestreamwrite_database" "test" { - database_name = aws_timestreamwrite_database.test.database_name - } + name = aws_timestreamwrite_database.test.database_name +} `, rDatabaseName) } @@ -196,40 +152,7 @@ resource "aws_timestreamwrite_database" "test" { } data "aws_timestreamwrite_database" "test" { - database_name = 
aws_timestreamwrite_database.test.database_name - } -`, rKmsKeyName, rDatabaseName) -} - -func testAccDatabaseDataSourceConfig_tags1(rDatabaseName, tagKey1, tagValue1 string) string { - return fmt.Sprintf(` -resource "aws_timestreamwrite_database" "test" { - database_name = %[1]q - - tags = { - %[2]q = %[3]q - } -} - -data "aws_timestreamwrite_database" "test" { - database_name = aws_timestreamwrite_database.test.database_name - } -`, rDatabaseName, tagKey1, tagValue1) -} - -func testAccDatabaseDataSourceConfig_tags2(rDatabaseName, tagKey1, tagValue1, tagKey2, tagValue2 string) string { - return fmt.Sprintf(` -resource "aws_timestreamwrite_database" "test" { - database_name = %[1]q - - tags = { - %[2]q = %[3]q - %[4]q = %[5]q - } + name = aws_timestreamwrite_database.test.database_name } - -data "aws_timestreamwrite_database" "test" { - database_name = aws_timestreamwrite_database.test.database_name - } -`, rDatabaseName, tagKey1, tagValue1, tagKey2, tagValue2) +`, rKmsKeyName, rDatabaseName) } diff --git a/website/docs/d/timestreamwrite_database.html.markdown b/website/docs/d/timestreamwrite_database.html.markdown index d2e15be51c84..5acacf0bbe9b 100644 --- a/website/docs/d/timestreamwrite_database.html.markdown +++ b/website/docs/d/timestreamwrite_database.html.markdown @@ -15,8 +15,8 @@ Terraform data source for managing an AWS Timestream Write Database. ### Basic Usage ```terraform -data "aws_timestreamwrite_database" "example" { - database_name = "database-example" +data "aws_timestreamwrite_database" "test" { + name = "database-example" } ``` @@ -24,13 +24,15 @@ data "aws_timestreamwrite_database" "example" { The following arguments are required: -* `database_name` – (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 64. +* `database_name` – (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 256. 
## Attribute Reference This data source exports the following attributes in addition to the arguments above: -* `id` - The name of the Timestream database. * `arn` - The ARN that uniquely identifies this database. +* `created_time` - Creation time of database. +* `database_name` – (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 256. * `kms_key_id` - The ARN of the KMS key used to encrypt the data stored in the database. -* `table_count` - The total number of tables found within the Timestream database. +* `last_updated_time` - Last time database was updated. +* `table_count` - Total number of tables in the Timestream database. From 961e332b468b980df971e123c6aa0eab13b5292a Mon Sep 17 00:00:00 2001 From: ThomasZalewski Date: Mon, 15 Jul 2024 12:18:43 -0400 Subject: [PATCH 09/10] Ci formatting change. --- internal/service/timestreamwrite/database_data_source_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/service/timestreamwrite/database_data_source_test.go b/internal/service/timestreamwrite/database_data_source_test.go index 812ba27cf5b9..b9de6657c049 100644 --- a/internal/service/timestreamwrite/database_data_source_test.go +++ b/internal/service/timestreamwrite/database_data_source_test.go @@ -11,7 +11,6 @@ import ( sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-aws/internal/acctest" - "github.com/hashicorp/terraform-provider-aws/names" ) From f14123fed6f7ccfdec8fd640a6893c1a809177cb Mon Sep 17 00:00:00 2001 From: ThomasZalewski Date: Tue, 16 Jul 2024 12:18:19 -0400 Subject: [PATCH 10/10] Changed header ordering --- internal/service/timestreamwrite/database_data_source.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/service/timestreamwrite/database_data_source.go b/internal/service/timestreamwrite/database_data_source.go index 
5e99a94a535f..04163dadbcc6 100644 --- a/internal/service/timestreamwrite/database_data_source.go +++ b/internal/service/timestreamwrite/database_data_source.go @@ -1,8 +1,8 @@ -package timestreamwrite - // Copyright (c) HashiCorp, Inc. // SPDX-License-Identifier: MPL-2.0 +package timestreamwrite + import ( "context"