Merge pull request #36368 from hunttom/f-aws_timestreamwrite_database-new-datasource

New Data Source for AWS Timestream Database
ThomasZalewski authored Jul 16, 2024
2 parents c7ca413 + f14123f commit 56be68d
Showing 5 changed files with 309 additions and 1 deletion.
3 changes: 3 additions & 0 deletions .changelog/36368.txt
@@ -0,0 +1,3 @@
```release-note:new-data-source
aws_timestreamwrite_database
```
105 changes: 105 additions & 0 deletions internal/service/timestreamwrite/database_data_source.go
@@ -0,0 +1,105 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package timestreamwrite

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/service/timestreamwrite"
	"github.com/hashicorp/terraform-plugin-framework-timetypes/timetypes"
	"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
	"github.com/hashicorp/terraform-plugin-framework/datasource"
	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
	"github.com/hashicorp/terraform-plugin-framework/types"
	"github.com/hashicorp/terraform-provider-aws/internal/create"
	"github.com/hashicorp/terraform-provider-aws/internal/framework"
	"github.com/hashicorp/terraform-provider-aws/internal/framework/flex"
	"github.com/hashicorp/terraform-provider-aws/names"
)

// @FrameworkDataSource(name="Database")
func newDataSourceDatabase(context.Context) (datasource.DataSourceWithConfigure, error) {
	return &dataSourceDatabase{}, nil
}

type dataSourceDatabase struct {
	framework.DataSourceWithConfigure
}

const (
	DSNameDatabase = "Database data source"
)

func (d *dataSourceDatabase) Metadata(_ context.Context, request datasource.MetadataRequest, response *datasource.MetadataResponse) {
	response.TypeName = "aws_timestreamwrite_database"
}

func (d *dataSourceDatabase) Schema(ctx context.Context, request datasource.SchemaRequest, response *datasource.SchemaResponse) {
	response.Schema = schema.Schema{
		Attributes: map[string]schema.Attribute{
			names.AttrARN: schema.StringAttribute{
				Computed: true,
				Validators: []validator.String{
					stringvalidator.LengthBetween(3, 256),
				},
			},
			names.AttrName: schema.StringAttribute{
				Required: true,
			},
			names.AttrCreatedTime: schema.StringAttribute{
				CustomType: timetypes.RFC3339Type{},
				Computed:   true,
			},
			names.AttrKMSKeyID: schema.StringAttribute{
				Computed: true,
			},
			"table_count": schema.Int64Attribute{
				Computed: true,
			},
			names.AttrLastUpdatedTime: schema.StringAttribute{
				CustomType: timetypes.RFC3339Type{},
				Computed:   true,
			},
		},
	}
}

func (d *dataSourceDatabase) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
	conn := d.Meta().TimestreamWriteClient(ctx)
	var data dsDescribeDatabase

	resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
	if resp.Diagnostics.HasError() {
		return
	}

	in := &timestreamwrite.DescribeDatabaseInput{
		DatabaseName: data.Name.ValueStringPointer(),
	}

	desc, err := conn.DescribeDatabase(ctx, in)

	if err != nil {
		resp.Diagnostics.AddError(
			create.ProblemStandardMessage(names.TimestreamWrite, create.ErrActionReading, DSNameDatabase, data.Name.String(), err),
			err.Error(),
		)
		return
	}

	// Flatten the DescribeDatabase output into the data source model.
	resp.Diagnostics.Append(flex.Flatten(ctx, desc.Database, &data)...)
	if resp.Diagnostics.HasError() {
		return
	}

	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
}

type dsDescribeDatabase struct {
	ARN             types.String      `tfsdk:"arn"`
	CreatedTime     timetypes.RFC3339 `tfsdk:"created_time"`
	Name            types.String      `tfsdk:"name"`
	KmsKeyID        types.String      `tfsdk:"kms_key_id"`
	LastUpdatedTime timetypes.RFC3339 `tfsdk:"last_updated_time"`
	TableCount      types.Int64       `tfsdk:"table_count"`
}
157 changes: 157 additions & 0 deletions internal/service/timestreamwrite/database_data_source_test.go
@@ -0,0 +1,157 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

package timestreamwrite_test

import (
	"fmt"
	"testing"

	"github.com/YakDriver/regexache"
	sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest"
	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
	"github.com/hashicorp/terraform-provider-aws/internal/acctest"
	"github.com/hashicorp/terraform-provider-aws/names"
)

func TestAccTimestreamWriteDatabaseDataSource_basic(t *testing.T) {
	ctx := acctest.Context(t)

	if testing.Short() {
		t.Skip("skipping long-running test in short mode")
	}

	rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_timestreamwrite_database.test"
	dataSourceName := "data.aws_timestreamwrite_database.test"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) },
		ErrorCheck:               acctest.ErrorCheck(t, names.TimestreamWriteServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		CheckDestroy:             testAccCheckDatabaseDestroy(ctx),
		Steps: []resource.TestStep{
			{
				Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDatabaseExists(ctx, resourceName),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrARN, resourceName, names.AttrARN),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrName, resourceName, names.AttrDatabaseName),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrCreatedTime, resourceName, names.AttrCreatedTime),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, resourceName, names.AttrKMSKeyID),
					resource.TestCheckResourceAttrPair(dataSourceName, "table_count", resourceName, "table_count"),
				),
			},
		},
	})
}

func TestAccTimestreamWriteDatabaseDataSource_kmsKey(t *testing.T) {
	ctx := acctest.Context(t)
	rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	dataSourceName := "data.aws_timestreamwrite_database.test"
	kmsResourceName := "aws_kms_key.test"
	resourceName := "aws_timestreamwrite_database.test"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) },
		ErrorCheck:               acctest.ErrorCheck(t, names.TimestreamWriteServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		CheckDestroy:             testAccCheckDatabaseDestroy(ctx),
		Steps: []resource.TestStep{
			{
				Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDatabaseExists(ctx, resourceName),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrName, resourceName, names.AttrDatabaseName),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, kmsResourceName, names.AttrARN),
				),
			},
		},
	})
}

func TestAccTimestreamWriteDatabaseDataSource_updateKMSKey(t *testing.T) {
	ctx := acctest.Context(t)
	rDatabaseName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	rKmsKeyName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_timestreamwrite_database.test"
	dataSourceName := "data.aws_timestreamwrite_database.test"
	kmsResourceName := "aws_kms_key.test"

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.PreCheck(ctx, t); testAccPreCheck(ctx, t) },
		ErrorCheck:               acctest.ErrorCheck(t, names.TimestreamWriteServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		CheckDestroy:             testAccCheckDatabaseDestroy(ctx),
		Steps: []resource.TestStep{
			{
				Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDatabaseExists(ctx, resourceName),
					acctest.MatchResourceAttrRegionalARN(dataSourceName, names.AttrKMSKeyID, "kms", regexache.MustCompile(`key/.+`)),
				),
			},
			{
				Config: testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDatabaseExists(ctx, resourceName),
					resource.TestCheckResourceAttrPair(dataSourceName, names.AttrKMSKeyID, kmsResourceName, names.AttrARN),
				),
			},
			{
				Config: testAccDatabaseDataSourceConfig_basic(rDatabaseName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckDatabaseExists(ctx, resourceName),
					acctest.MatchResourceAttrRegionalARN(dataSourceName, names.AttrKMSKeyID, "kms", regexache.MustCompile(`key/.+`)),
				),
			},
		},
	})
}

func testAccDatabaseDataSourceConfig_basic(rDatabaseName string) string {
	return fmt.Sprintf(`
resource "aws_timestreamwrite_database" "test" {
  database_name = %[1]q
}

data "aws_timestreamwrite_database" "test" {
  name = aws_timestreamwrite_database.test.database_name
}
`, rDatabaseName)
}

func testAccDatabaseDataSourceConfig_kmsKey(rDatabaseName, rKmsKeyName string) string {
	return fmt.Sprintf(`
resource "aws_kms_key" "test" {
  description = %[1]q

  policy = <<POLICY
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": {
        "AWS": "*"
      },
      "Action": "kms:*",
      "Resource": "*"
    }
  ]
}
POLICY
}

resource "aws_timestreamwrite_database" "test" {
  database_name = %[2]q
  kms_key_id    = aws_kms_key.test.arn
}

data "aws_timestreamwrite_database" "test" {
  name = aws_timestreamwrite_database.test.database_name
}
`, rKmsKeyName, rDatabaseName)
}
7 changes: 6 additions & 1 deletion internal/service/timestreamwrite/service_package_gen.go

Some generated files are not rendered by default.

38 changes: 38 additions & 0 deletions website/docs/d/timestreamwrite_database.html.markdown
@@ -0,0 +1,38 @@
---
subcategory: "Timestream Write"
layout: "aws"
page_title: "AWS: aws_timestreamwrite_database"
description: |-
  Terraform data source for retrieving information about an AWS Timestream Write database.
---

# Data Source: aws_timestreamwrite_database

Terraform data source for retrieving information about an AWS Timestream Write database.

## Example Usage

### Basic Usage

```terraform
data "aws_timestreamwrite_database" "test" {
  name = "database-example"
}
```

## Argument Reference

The following arguments are required:

* `name` - (Required) The name of the Timestream database. Minimum length of 3. Maximum length of 256.

## Attribute Reference

This data source exports the following attributes in addition to the arguments above:

* `arn` - The ARN that uniquely identifies this database.
* `created_time` - The time the database was created, in RFC3339 format.
* `kms_key_id` - The ARN of the KMS key used to encrypt the data stored in the database.
* `last_updated_time` - The time the database was last updated, in RFC3339 format.
* `table_count` - The total number of tables in the Timestream database.
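The exported attributes can be referenced like any other data source attribute. The snippet below is an illustrative sketch: the data source label `example`, the database name, and the output names are placeholder values.

```terraform
# Look up an existing Timestream database by name.
data "aws_timestreamwrite_database" "example" {
  name = "database-example"
}

# Surface selected attributes of the database.
output "timestream_database_arn" {
  value = data.aws_timestreamwrite_database.example.arn
}

output "timestream_database_table_count" {
  value = data.aws_timestreamwrite_database.example.table_count
}
```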
