Add source_hash to aws_s3_bucket_object
Allows one to store a hash in state to trigger a resource update.
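
For context, a minimal configuration sketch of the pattern this commit enables, shown in Terraform 0.12+ syntax; the bucket name, key, and file path are illustrative and not taken from this commit:

resource "aws_s3_bucket_object" "example" {
  bucket = "my-example-bucket" # illustrative bucket name
  key    = "example-key"
  source = "path/to/file"

  # Storing the file's MD5 in state lets Terraform notice local edits
  # to the file and plan an update of the object on the next apply.
  source_hash = filemd5("path/to/file")
}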
theophilechevalier authored and YakDriver committed Jul 13, 2021
1 parent a8eb4c7 commit 26271f3
Showing 3 changed files with 70 additions and 0 deletions.
12 changes: 12 additions & 0 deletions aws/resource_aws_s3_bucket_object.go
@@ -191,6 +191,11 @@ func resourceAwsS3BucketObject() *schema.Resource {
Optional: true,
ValidateFunc: validation.IsRFC3339Time,
},

"source_hash": {
Type: schema.TypeString,
Optional: true,
},
},
}
}
@@ -564,6 +569,12 @@ func resourceAwsS3BucketObjectCustomizeDiff(_ context.Context, d *schema.Resourc
if hasS3BucketObjectContentChanges(d) {
return d.SetNewComputed("version_id")
}

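// A changed source_hash means the local source file changed, so mark the
// AWS-derived attributes as unknown to force an update of the object.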
if d.HasChange("source_hash") {
d.SetNewComputed("version_id")
d.SetNewComputed("etag")
}

return nil
}

@@ -582,6 +593,7 @@ func hasS3BucketObjectContentChanges(d resourceDiffer) bool {
"metadata",
"server_side_encryption",
"source",
"source_hash",
"storage_class",
"website_redirect",
} {
57 changes: 57 additions & 0 deletions aws/resource_aws_s3_bucket_object_test.go
@@ -247,6 +247,48 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) {
})
}

func TestAccAWSS3BucketObject_SourceHashTrigger(t *testing.T) {
var obj, updated_obj s3.GetObjectOutput
resourceName := "aws_s3_bucket_object.object"
source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }")
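// rewrite_source mutates the local file after the first apply so the next
// plan sees a new filemd5 value (hence ExpectNonEmptyPlan on step one).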
rewrite_source := func(*terraform.State) error {
if err := ioutil.WriteFile(source, []byte("{any other thing will do }"), 0644); err != nil {
os.Remove(source)
t.Fatal(err)
}
return nil
}
rInt := acctest.RandInt()

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
Steps: []resource.TestStep{
{
PreConfig: func() {},
Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"),
resource.TestCheckResourceAttr(resourceName, "source_hash", "7b006ff4d70f68cc65061acf2f802e6f"),
rewrite_source,
),
ExpectNonEmptyPlan: true,
},
{
PreConfig: func() {},
Config: testAccAWSS3BucketObjectConfig_SourceHashTrigger(rInt, source),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSS3BucketObjectExists(resourceName, &updated_obj),
testAccCheckAWSS3BucketObjectBody(&updated_obj, "{any other thing will do }"),
resource.TestCheckResourceAttr(resourceName, "source_hash", "77a736aa9e04d0dc96b9b30894963983"),
),
},
},
})
}

func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) {
var obj s3.GetObjectOutput
resourceName := "aws_s3_bucket_object.object"
@@ -1582,6 +1624,21 @@ resource "aws_s3_bucket_object" "object" {
`, randInt, contentBase64)
}

func testAccAWSS3BucketObjectConfig_SourceHashTrigger(randInt int, source string) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "test-key"
source = "%s"
source_hash = "${filemd5("%s")}"
}
`, randInt, source, source)
}

func testAccAWSS3BucketObjectConfig_updateable(randInt int, bucketVersioning bool, source string) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket_3" {
1 change: 1 addition & 0 deletions website/docs/r/s3_bucket_object.html.markdown
@@ -131,6 +131,7 @@ The following arguments are supported:
for the object. Can be either "`STANDARD`", "`REDUCED_REDUNDANCY`", "`ONEZONE_IA`", "`INTELLIGENT_TIERING`", "`GLACIER`", "`DEEP_ARCHIVE`", or "`STANDARD_IA`". Defaults to "`STANDARD`".
* `etag` - (Optional) Used to trigger updates. The only meaningful value is `${filemd5("path/to/file")}` (Terraform 0.11.12 or later) or `${md5(file("path/to/file"))}` (Terraform 0.11.11 or earlier).
This attribute is not compatible with KMS encryption, `kms_key_id` or `server_side_encryption = "aws:kms"`.
* `source_hash` - (Optional) Used to trigger updates based on changes to the local source file. If used, must be set to `${filemd5("path/to/source")}` (Terraform 0.11.12 or later). Unlike `etag`, the value is stored in the state and does not come from AWS, which makes it especially useful for working around the `etag` limitations with KMS encryption (see the sketch after this excerpt).
* `server_side_encryption` - (Optional) Specifies server-side encryption of the object in S3. Valid values are "`AES256`" and "`aws:kms`".
* `kms_key_id` - (Optional) Amazon Resource Name (ARN) of the KMS Key to use for object encryption. If the S3 Bucket has server-side encryption enabled, that value will automatically be used. If referencing the
`aws_kms_key` resource, use the `arn` attribute. If referencing the `aws_kms_alias` data source or resource, use the `target_key_arn` attribute. Terraform will only perform drift detection if a configuration value
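A hedged sketch of the KMS case the new argument is meant to address, in Terraform 0.12+ syntax; the bucket, key, file path, and the `aws_kms_key.example` resource are hypothetical and not part of this commit. With `aws:kms` server-side encryption the object's ETag is no longer the MD5 of the file, so `etag`-based change detection cannot be used; `source_hash` still works because Terraform compares it against the value kept in state rather than anything returned by AWS.

resource "aws_s3_bucket_object" "encrypted" {
  bucket     = "my-example-bucket"      # hypothetical bucket
  key        = "config/app.json"        # hypothetical key
  source     = "files/app.json"
  kms_key_id = aws_kms_key.example.arn  # hypothetical KMS key resource

  # With aws:kms encryption the S3 ETag is not the file's MD5, so
  # `etag = filemd5(...)` cannot detect local changes; the hash stored
  # in state via source_hash triggers the update instead.
  source_hash = filemd5("files/app.json")
}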
