Skip to content

Commit

Permalink
Merge pull request #38569 from acwwat/f-aws_pipes_pipe-add_include_ex…
Browse files Browse the repository at this point in the history
…ecution_data_arg

feat: add log_configuration.include_execution_data arg for aws_pipes_pipe
  • Loading branch information
ewbankkit authored Jul 29, 2024
2 parents 1990c0b + c0eee72 commit 0df57b7
Show file tree
Hide file tree
Showing 4 changed files with 151 additions and 15 deletions.
3 changes: 3 additions & 0 deletions .changelog/38569.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
resource/aws_pipes_pipe: Add `log_configuration.include_execution_data` argument
```
43 changes: 30 additions & 13 deletions internal/service/pipes/log_configuration.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"github.com/aws/aws-sdk-go-v2/service/pipes/types"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-provider-aws/internal/enum"
"github.com/hashicorp/terraform-provider-aws/internal/flex"
"github.com/hashicorp/terraform-provider-aws/names"
)

Expand All @@ -18,11 +19,6 @@ func logConfigurationSchema() *schema.Schema {
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"level": {
Type: schema.TypeString,
Required: true,
ValidateDiagFunc: enum.Validate[types.LogLevel](),
},
"cloudwatch_logs_log_destination": {
Type: schema.TypeList,
Optional: true,
Expand All @@ -49,6 +45,19 @@ func logConfigurationSchema() *schema.Schema {
},
},
},
"include_execution_data": {
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
ValidateDiagFunc: enum.Validate[types.IncludeExecutionDataOption](),
},
},
"level": {
Type: schema.TypeString,
Required: true,
ValidateDiagFunc: enum.Validate[types.LogLevel](),
},
"s3_log_destination": {
Type: schema.TypeList,
Optional: true,
Expand Down Expand Up @@ -87,10 +96,6 @@ func expandPipeLogConfigurationParameters(tfMap map[string]interface{}) *types.P

apiObject := &types.PipeLogConfigurationParameters{}

if v, ok := tfMap["level"].(string); ok && v != "" {
apiObject.Level = types.LogLevel(v)
}

if v, ok := tfMap["cloudwatch_logs_log_destination"].([]interface{}); ok && len(v) > 0 && v[0] != nil {
apiObject.CloudwatchLogsLogDestination = expandCloudWatchLogsLogDestinationParameters(v[0].(map[string]interface{}))
}
Expand All @@ -99,6 +104,14 @@ func expandPipeLogConfigurationParameters(tfMap map[string]interface{}) *types.P
apiObject.FirehoseLogDestination = expandFirehoseLogDestinationParameters(v[0].(map[string]interface{}))
}

if v, ok := tfMap["include_execution_data"].(*schema.Set); ok && v != nil {
apiObject.IncludeExecutionData = flex.ExpandStringyValueSet[types.IncludeExecutionDataOption](v)
}

if v, ok := tfMap["level"].(string); ok && v != "" {
apiObject.Level = types.LogLevel(v)
}

if v, ok := tfMap["s3_log_destination"].([]interface{}); ok && len(v) > 0 && v[0] != nil {
apiObject.S3LogDestination = expandS3LogDestinationParameters(v[0].(map[string]interface{}))
}
Expand Down Expand Up @@ -167,10 +180,6 @@ func flattenPipeLogConfiguration(apiObject *types.PipeLogConfiguration) map[stri

tfMap := map[string]interface{}{}

if v := apiObject.Level; v != "" {
tfMap["level"] = v
}

if v := apiObject.CloudwatchLogsLogDestination; v != nil {
tfMap["cloudwatch_logs_log_destination"] = []interface{}{flattenCloudWatchLogsLogDestination(v)}
}
Expand All @@ -179,6 +188,14 @@ func flattenPipeLogConfiguration(apiObject *types.PipeLogConfiguration) map[stri
tfMap["firehose_log_destination"] = []interface{}{flattenFirehoseLogDestination(v)}
}

if v := apiObject.IncludeExecutionData; v != nil {
tfMap["include_execution_data"] = flex.FlattenStringyValueList(v)
}

if v := apiObject.Level; v != "" {
tfMap["level"] = v
}

if v := apiObject.S3LogDestination; v != nil {
tfMap["s3_log_destination"] = []interface{}{flattenS3LogDestination(v)}
}
Expand Down
93 changes: 92 additions & 1 deletion internal/service/pipes/pipe_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -395,6 +395,69 @@ func TestAccPipesPipe_update_logConfiguration_cloudwatchLogsLogDestination(t *te
})
}

// TestAccPipesPipe_logConfiguration_includeExecutionData verifies that the
// log_configuration.include_execution_data argument can be left unset, set to
// ["ALL"], and reset to empty, and that the resource imports cleanly after
// each apply.
func TestAccPipesPipe_logConfiguration_includeExecutionData(t *testing.T) {
	ctx := acctest.Context(t)
	var pipe pipes.DescribePipeOutput
	rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix)
	resourceName := "aws_pipes_pipe.test"

	// Import verification step, reused after every apply step.
	importStep := resource.TestStep{
		ResourceName:      resourceName,
		ImportState:       true,
		ImportStateVerify: true,
	}

	// Checks shared by the steps where include_execution_data ends up empty.
	emptyExecutionDataChecks := resource.ComposeAggregateTestCheckFunc(
		testAccCheckPipeExists(ctx, resourceName, &pipe),
		acctest.MatchResourceAttrRegionalARN(resourceName, names.AttrARN, "pipes", regexache.MustCompile(regexp.QuoteMeta(`pipe/`+rName))),
		resource.TestCheckResourceAttr(resourceName, "log_configuration.#", acctest.Ct1),
		resource.TestCheckResourceAttr(resourceName, "log_configuration.0.include_execution_data.#", acctest.Ct0),
	)

	resource.ParallelTest(t, resource.TestCase{
		PreCheck: func() {
			acctest.PreCheck(ctx, t)
			acctest.PreCheckPartitionHasService(t, names.PipesEndpointID)
			testAccPreCheck(ctx, t)
		},
		ErrorCheck:               acctest.ErrorCheck(t, names.PipesServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		CheckDestroy:             testAccCheckPipeDestroy(ctx),
		Steps: []resource.TestStep{
			{
				// Argument omitted entirely (HCL null).
				Config: testAccPipeConfig_logConfiguration_includeExecutionData(rName, "null"),
				Check:  emptyExecutionDataChecks,
			},
			importStep,
			{
				// Argument set to ["ALL"].
				Config: testAccPipeConfig_logConfiguration_includeExecutionData(rName, `["ALL"]`),
				Check: resource.ComposeAggregateTestCheckFunc(
					testAccCheckPipeExists(ctx, resourceName, &pipe),
					acctest.MatchResourceAttrRegionalARN(resourceName, names.AttrARN, "pipes", regexache.MustCompile(regexp.QuoteMeta(`pipe/`+rName))),
					resource.TestCheckResourceAttr(resourceName, "log_configuration.#", acctest.Ct1),
					resource.TestCheckResourceAttr(resourceName, "log_configuration.0.include_execution_data.#", acctest.Ct1),
					resource.TestCheckResourceAttr(resourceName, "log_configuration.0.include_execution_data.0", "ALL"),
				),
			},
			importStep,
			{
				// Argument reset to an explicit empty list.
				Config: testAccPipeConfig_logConfiguration_includeExecutionData(rName, "[]"),
				Check:  emptyExecutionDataChecks,
			},
			importStep,
		},
	})
}

func TestAccPipesPipe_sourceParameters_filterCriteria(t *testing.T) {
ctx := acctest.Context(t)
var pipe pipes.DescribePipeOutput
Expand Down Expand Up @@ -1989,6 +2052,34 @@ resource "aws_cloudwatch_log_group" "target" {
`, rName))
}

// testAccPipeConfig_logConfiguration_includeExecutionData returns a pipe
// configuration with a CloudWatch Logs log destination whose
// include_execution_data argument is interpolated verbatim from
// includeExecutionData (e.g. "null", "[]", or `["ALL"]`).
func testAccPipeConfig_logConfiguration_includeExecutionData(rName, includeExecutionData string) string {
	pipeConfig := fmt.Sprintf(`
resource "aws_pipes_pipe" "test" {
  depends_on = [aws_iam_role_policy.source, aws_iam_role_policy.target]
  name       = %[1]q
  role_arn   = aws_iam_role.test.arn
  source     = aws_sqs_queue.source.arn
  target     = aws_sqs_queue.target.arn
  log_configuration {
    include_execution_data = %[2]s
    level                  = "INFO"
    cloudwatch_logs_log_destination {
      log_group_arn = aws_cloudwatch_log_group.target.arn
    }
  }
}
resource "aws_cloudwatch_log_group" "target" {
  name = "%[1]s-target"
}
`, rName, includeExecutionData)

	return acctest.ConfigCompose(
		testAccPipeConfig_base(rName),
		testAccPipeConfig_baseSQSSource(rName),
		testAccPipeConfig_baseSQSTarget(rName),
		pipeConfig,
	)
}

func testAccPipeConfig_sourceParameters_filterCriteria1(rName, criteria1 string) string {
return acctest.ConfigCompose(
testAccPipeConfig_base(rName),
Expand Down Expand Up @@ -2610,7 +2701,7 @@ resource "aws_iam_role_policy" "source" {
resource "aws_mq_broker" "source" {
broker_name = "%[1]s-source"
engine_type = "RabbitMQ"
engine_version = "3.8.11"
engine_version = "3.12.13"
host_instance_type = "mq.t3.micro"
authentication_strategy = "simple"
Expand Down
27 changes: 26 additions & 1 deletion website/docs/r/pipes_pipe.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,30 @@ resource "aws_pipes_pipe" "example" {
}
```

### CloudWatch Logs Logging Configuration Usage

```terraform
resource "aws_cloudwatch_log_group" "example" {
name = "example-pipe-target"
}
resource "aws_pipes_pipe" "example" {
depends_on = [aws_iam_role_policy.source, aws_iam_role_policy.target]
name = "example-pipe"
role_arn = aws_iam_role.example.arn
source = aws_sqs_queue.source.arn
target = aws_sqs_queue.target.arn
log_configuration {
include_execution_data = ["ALL"]
level = "INFO"
cloudwatch_logs_log_destination {
      log_group_arn = aws_cloudwatch_log_group.example.arn
}
}
}
```

### SQS Source and Target Configuration Usage

```terraform
Expand Down Expand Up @@ -205,9 +229,10 @@ You can find out more about EventBridge Pipes Enrichment in the [User Guide](htt

You can find out more about EventBridge Pipes Enrichment in the [User Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-pipes-logs.html).

* `level` - (Required) The level of logging detail to include. Valid values `OFF`, `ERROR`, `INFO` and `TRACE`.
* `cloudwatch_logs_log_destination` - (Optional) Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
* `firehose_log_destination` - (Optional) Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
* `include_execution_data` - (Optional) String list that specifies whether the execution data (specifically, the `payload`, `awsRequest`, and `awsResponse` fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values `ALL`.
* `level` - (Required) The level of logging detail to include. Valid values `OFF`, `ERROR`, `INFO` and `TRACE`.
* `s3_log_destination` - (Optional) Amazon S3 logging configuration settings for the pipe. Detailed below.

#### log_configuration.cloudwatch_logs_log_destination Configuration Block
Expand Down

0 comments on commit 0df57b7

Please sign in to comment.