diff --git a/aws/resource_aws_s3_bucket_object.go b/aws/resource_aws_s3_bucket_object.go
index 278823b5df33..3e7786c025a7 100644
--- a/aws/resource_aws_s3_bucket_object.go
+++ b/aws/resource_aws_s3_bucket_object.go
@@ -22,12 +22,12 @@ import (
 func resourceAwsS3BucketObject() *schema.Resource {
   return &schema.Resource{
-    Create: resourceAwsS3BucketObjectPut,
+    Create: resourceAwsS3BucketObjectCreate,
     Read:   resourceAwsS3BucketObjectRead,
-    Update: resourceAwsS3BucketObjectPut,
+    Update: resourceAwsS3BucketObjectUpdate,
     Delete: resourceAwsS3BucketObjectDelete,

-    CustomizeDiff: updateComputedAttributes,
+    CustomizeDiff: resourceAwsS3BucketObjectCustomizeDiff,

     Schema: map[string]*schema.Schema{
       "bucket": {
@@ -36,9 +36,15 @@ func resourceAwsS3BucketObject() *schema.Resource {
         ForceNew: true,
       },

+      "key": {
+        Type:     schema.TypeString,
+        Required: true,
+        ForceNew: true,
+      },
+
       "acl": {
         Type:     schema.TypeString,
-        Default:  "private",
+        Default:  s3.ObjectCannedACLPrivate,
         Optional: true,
         ValidateFunc: validation.StringInSlice([]string{
           s3.ObjectCannedACLPrivate,
@@ -77,12 +83,6 @@ func resourceAwsS3BucketObject() *schema.Resource {
         Computed: true,
       },

-      "key": {
-        Type:     schema.TypeString,
-        Required: true,
-        ForceNew: true,
-      },
-
       "source": {
         Type:     schema.TypeString,
         Optional: true,
@@ -156,13 +156,6 @@ func resourceAwsS3BucketObject() *schema.Resource {
   }
 }

-func updateComputedAttributes(d *schema.ResourceDiff, meta interface{}) error {
-  if d.HasChange("etag") {
-    d.SetNewComputed("version_id")
-  }
-  return nil
-}
-
 func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
   s3conn := meta.(*AWSClient).s3conn
@@ -178,10 +171,16 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro
     }
     file, err := os.Open(path)
     if err != nil {
-      return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+      return fmt.Errorf("Error opening S3 bucket object source (%s): %s", path, err)
     }

     body = file
+    defer func() {
+      err := file.Close()
+      if err != nil {
+        log.Printf("[WARN] Error closing S3 bucket object source (%s): %s", path, err)
+      }
+    }()
   } else if v, ok := d.GetOk("content"); ok {
     content := v.(string)
     body = bytes.NewReader([]byte(content))
@@ -258,19 +257,18 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) erro
     putInput.WebsiteRedirectLocation = aws.String(v.(string))
   }

-  resp, err := s3conn.PutObject(putInput)
-  if err != nil {
+  if _, err := s3conn.PutObject(putInput); err != nil {
     return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
   }

-  // See https://forums.aws.amazon.com/thread.jspa?threadID=44003
-  d.Set("etag", strings.Trim(*resp.ETag, `"`))
-
-  d.Set("version_id", resp.VersionId)
   d.SetId(key)

   return resourceAwsS3BucketObjectRead(d, meta)
 }

+func resourceAwsS3BucketObjectCreate(d *schema.ResourceData, meta interface{}) error {
+  return resourceAwsS3BucketObjectPut(d, meta)
+}
+
 func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
   s3conn := meta.(*AWSClient).s3conn
@@ -321,7 +319,8 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) err
       d.Set("kms_key_id", resp.SSEKMSKeyId)
     }
   }
-  d.Set("etag", strings.Trim(*resp.ETag, `"`))
+  // See https://forums.aws.amazon.com/thread.jspa?threadID=44003
+  d.Set("etag", strings.Trim(aws.StringValue(resp.ETag), `"`))

   // The "STANDARD" (which is also the default) storage
   // class when set would not be included in the results.
@@ -331,20 +330,56 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) err
   }

   if !restricted {
-    tagResp, err := s3conn.GetObjectTagging(
-      &s3.GetObjectTaggingInput{
-        Bucket: aws.String(bucket),
-        Key:    aws.String(key),
-      })
-    if err != nil {
-      return fmt.Errorf("Failed to get object tags (bucket: %s, key: %s): %s", bucket, key, err)
+    if err := getTagsS3Object(s3conn, d); err != nil {
+      return fmt.Errorf("error getting S3 object tags (bucket: %s, key: %s): %s", bucket, key, err)
     }
-    d.Set("tags", tagsToMapS3(tagResp.TagSet))
   }

   return nil
 }

+func resourceAwsS3BucketObjectUpdate(d *schema.ResourceData, meta interface{}) error {
+  // Changes to any of these attributes require creation of a new object version (if the bucket is versioned):
+  for _, key := range []string{
+    "cache_control",
+    "content_disposition",
+    "content_encoding",
+    "content_language",
+    "content_type",
+    "source",
+    "content",
+    "content_base64",
+    "storage_class",
+    "server_side_encryption",
+    "kms_key_id",
+    "etag",
+    "website_redirect",
+  } {
+    if d.HasChange(key) {
+      return resourceAwsS3BucketObjectPut(d, meta)
+    }
+  }
+
+  conn := meta.(*AWSClient).s3conn
+
+  if d.HasChange("acl") {
+    _, err := conn.PutObjectAcl(&s3.PutObjectAclInput{
+      Bucket: aws.String(d.Get("bucket").(string)),
+      Key:    aws.String(d.Get("key").(string)),
+      ACL:    aws.String(d.Get("acl").(string)),
+    })
+    if err != nil {
+      return fmt.Errorf("error putting S3 object ACL: %s", err)
+    }
+  }
+
+  if err := setTagsS3Object(conn, d); err != nil {
+    return fmt.Errorf("error setting S3 object tags: %s", err)
+  }
+
+  return resourceAwsS3BucketObjectRead(d, meta)
+}
+
 func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
   s3conn := meta.(*AWSClient).s3conn
@@ -388,3 +423,11 @@ func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) e

   return nil
 }
+
+func resourceAwsS3BucketObjectCustomizeDiff(d *schema.ResourceDiff, meta interface{}) error {
+  if d.HasChange("etag") {
+    d.SetNewComputed("version_id")
+  }
+
+  return nil
+}
diff --git a/aws/resource_aws_s3_bucket_object_test.go b/aws/resource_aws_s3_bucket_object_test.go
index 99c4c4d0ee2f..0848f5d78539 100644
--- a/aws/resource_aws_s3_bucket_object_test.go
+++ b/aws/resource_aws_s3_bucket_object_test.go
@@ -1,6 +1,7 @@
 package aws

 import (
+  "encoding/base64"
   "fmt"
   "io/ioutil"
   "os"
@@ -17,19 +18,12 @@ import (
 )

 func TestAccAWSS3BucketObject_source(t *testing.T) {
-  tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj-source")
-  if err != nil {
-    t.Fatal(err)
-  }
-  defer os.Remove(tmpFile.Name())
-
-  rInt := acctest.RandInt()
-  // first write some data to the tempfile just so it's not 0 bytes.
-  err = ioutil.WriteFile(tmpFile.Name(), []byte("{anything will do }"), 0644)
-  if err != nil {
-    t.Fatal(err)
-  }
   var obj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()
+
+  source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }")
+  defer os.Remove(source)

   resource.ParallelTest(t, resource.TestCase{
     PreCheck:     func() { testAccPreCheck(t) },
@@ -37,16 +31,20 @@ func TestAccAWSS3BucketObject_source(t *testing.T) {
     CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
     Steps: []resource.TestStep{
       {
-        Config: testAccAWSS3BucketObjectConfigSource(rInt, tmpFile.Name()),
-        Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj),
+        Config: testAccAWSS3BucketObjectConfigSource(rInt, source),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"),
+        ),
       },
     },
   })
 }

 func TestAccAWSS3BucketObject_content(t *testing.T) {
-  rInt := acctest.RandInt()
   var obj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()

   resource.ParallelTest(t, resource.TestCase{
     PreCheck:     func() { testAccPreCheck(t) },
@@ -55,22 +53,10 @@ func TestAccAWSS3BucketObject_content(t *testing.T) {
     Steps: []resource.TestStep{
       {
         PreConfig: func() {},
-        Config:    testAccAWSS3BucketObjectConfigContent(rInt),
+        Config:    testAccAWSS3BucketObjectConfigContent(rInt, "some_bucket_content"),
         Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj),
-          func(s *terraform.State) error {
-            body, err := ioutil.ReadAll(obj.Body)
-            if err != nil {
-              return fmt.Errorf("failed to read body: %s", err)
-            }
-            obj.Body.Close()
-
-            if got, want := string(body), "some_bucket_content"; got != want {
-              return fmt.Errorf("wrong result body %q; want %q", got, want)
-            }
-
-            return nil
-          },
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          testAccCheckAWSS3BucketObjectBody(&obj, "some_bucket_content"),
         ),
       },
     },
@@ -78,8 +64,9 @@
 }

 func TestAccAWSS3BucketObject_contentBase64(t *testing.T) {
-  rInt := acctest.RandInt()
   var obj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()

   resource.ParallelTest(t, resource.TestCase{
     PreCheck:     func() { testAccPreCheck(t) },
@@ -88,22 +75,10 @@ func TestAccAWSS3BucketObject_contentBase64(t *testing.T) {
     Steps: []resource.TestStep{
       {
         PreConfig: func() {},
-        Config:    testAccAWSS3BucketObjectConfigContentBase64(rInt),
+        Config:    testAccAWSS3BucketObjectConfigContentBase64(rInt, base64.StdEncoding.EncodeToString([]byte("some_bucket_content"))),
         Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj),
-          func(s *terraform.State) error {
-            body, err := ioutil.ReadAll(obj.Body)
-            if err != nil {
-              return fmt.Errorf("failed to read body: %s", err)
-            }
-            obj.Body.Close()
-
-            if got, want := string(body), "some_bucket_content"; got != want {
-              return fmt.Errorf("wrong result body %q; want %q", got, want)
-            }
-
-            return nil
-          },
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          testAccCheckAWSS3BucketObjectBody(&obj, "some_bucket_content"),
         ),
       },
     },
@@ -111,20 +86,12 @@
 }

 func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) {
-  tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj-content-characteristics")
-  if err != nil {
-    t.Fatal(err)
-  }
-  defer os.Remove(tmpFile.Name())
-
+  var obj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
   rInt := acctest.RandInt()
-  // first write some data to the tempfile just so it's not 0 bytes.
-  err = ioutil.WriteFile(tmpFile.Name(), []byte("{anything will do }"), 0644)
-  if err != nil {
-    t.Fatal(err)
-  }
-  var obj s3.GetObjectOutput
+
+  source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }")
+  defer os.Remove(source)

   resource.ParallelTest(t, resource.TestCase{
     PreCheck:     func() { testAccPreCheck(t) },
@@ -132,13 +99,12 @@ func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) {
     CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
     Steps: []resource.TestStep{
       {
-        Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics(rInt, tmpFile.Name()),
+        Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics(rInt, source),
         Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object", "content_type", "binary/octet-stream"),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object", "website_redirect", "http://google.com"),
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"),
+          resource.TestCheckResourceAttr(resourceName, "content_type", "binary/octet-stream"),
+          resource.TestCheckResourceAttr(resourceName, "website_redirect", "http://google.com"),
         ),
       },
     },
@@ -146,18 +112,83 @@
 }

 func TestAccAWSS3BucketObject_updates(t *testing.T) {
-  tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj-updates")
-  if err != nil {
-    t.Fatal(err)
-  }
-  defer os.Remove(tmpFile.Name())
+  var originalObj, modifiedObj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()
+
+  sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial object state")
+  defer os.Remove(sourceInitial)
+  sourceModified := testAccAWSS3BucketObjectCreateTempFile(t, "modified object")
+  defer os.Remove(sourceModified)
+
+  resource.ParallelTest(t, resource.TestCase{
+    PreCheck:     func() { testAccPreCheck(t) },
+    Providers:    testAccProviders,
+    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+    Steps: []resource.TestStep{
+      {
+        Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, sourceInitial),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj),
+          testAccCheckAWSS3BucketObjectBody(&originalObj, "initial object state"),
+          resource.TestCheckResourceAttr(resourceName, "etag", "647d1d58e1011c743ec67d5e8af87b53"),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_updateable(rInt, false, sourceModified),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj),
+          testAccCheckAWSS3BucketObjectBody(&modifiedObj, "modified object"),
+          resource.TestCheckResourceAttr(resourceName, "etag", "1c7fd13df1515c2a13ad9eb068931f09"),
+        ),
+      },
+    },
+  })
+}
+
+func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) {
+  var originalObj, modifiedObj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
   rInt := acctest.RandInt()
-  err = ioutil.WriteFile(tmpFile.Name(), []byte("initial object state"), 0644)
-  if err != nil {
-    t.Fatal(err)
-  }
+
+  sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial versioned object state")
state") + defer os.Remove(sourceInitial) + sourceModified := testAccAWSS3BucketObjectCreateTempFile(t, "modified versioned object") + defer os.Remove(sourceInitial) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceInitial), + Check: resource.ComposeTestCheckFunc( + testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), + testAccCheckAWSS3BucketObjectBody(&originalObj, "initial versioned object state"), + resource.TestCheckResourceAttr(resourceName, "etag", "cee4407fa91906284e2a5e5e03e86b1b"), + ), + }, + { + Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceModified), + Check: resource.ComposeTestCheckFunc( + testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj), + testAccCheckAWSS3BucketObjectBody(&modifiedObj, "modified versioned object"), + resource.TestCheckResourceAttr(resourceName, "etag", "00b8c73b1b50e7cc932362c7225b8e29"), + testAccCheckAWSS3BucketObjectVersionIdDiffers(&modifiedObj, &originalObj), + ), + }, + }, + }) +} + +func TestAccAWSS3BucketObject_kms(t *testing.T) { var obj s3.GetObjectOutput + resourceName := "aws_s3_bucket_object.object" + rInt := acctest.RandInt() + + source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") + defer os.Remove(source) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -165,43 +196,138 @@ func TestAccAWSS3BucketObject_updates(t *testing.T) { CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { - Config: testAccAWSS3BucketObjectConfig_updates(rInt, tmpFile.Name()), + PreConfig: func() {}, + Config: testAccAWSS3BucketObjectConfig_withKMSId(rInt, source), Check: resource.ComposeTestCheckFunc( - testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj), - resource.TestCheckResourceAttr("aws_s3_bucket_object.object", "etag", "647d1d58e1011c743ec67d5e8af87b53"), + testAccCheckAWSS3BucketObjectExists(resourceName, &obj), + testAccCheckAWSS3BucketObjectSSE(resourceName, "aws:kms"), + testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), ), }, + }, + }) +} + +func TestAccAWSS3BucketObject_sse(t *testing.T) { + var obj s3.GetObjectOutput + resourceName := "aws_s3_bucket_object.object" + rInt := acctest.RandInt() + + source := testAccAWSS3BucketObjectCreateTempFile(t, "{anything will do }") + defer os.Remove(source) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, + Steps: []resource.TestStep{ { - PreConfig: func() { - err = ioutil.WriteFile(tmpFile.Name(), []byte("modified object"), 0644) - if err != nil { - t.Fatal(err) - } - }, - Config: testAccAWSS3BucketObjectConfig_updates(rInt, tmpFile.Name()), + PreConfig: func() {}, + Config: testAccAWSS3BucketObjectConfig_withSSE(rInt, source), Check: resource.ComposeTestCheckFunc( - testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj), - resource.TestCheckResourceAttr("aws_s3_bucket_object.object", "etag", "1c7fd13df1515c2a13ad9eb068931f09"), + testAccCheckAWSS3BucketObjectExists(resourceName, &obj), + testAccCheckAWSS3BucketObjectSSE(resourceName, "AES256"), + testAccCheckAWSS3BucketObjectBody(&obj, "{anything will do }"), ), }, }, }) } -func TestAccAWSS3BucketObject_updatesWithVersioning(t 
-  tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj-updates-w-versions")
-  if err != nil {
-    t.Fatal(err)
-  }
-  defer os.Remove(tmpFile.Name())
+func TestAccAWSS3BucketObject_acl(t *testing.T) {
+  var obj1, obj2, obj3 s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()
+
+  resource.ParallelTest(t, resource.TestCase{
+    PreCheck:     func() { testAccPreCheck(t) },
+    Providers:    testAccProviders,
+    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+    Steps: []resource.TestStep{
+      {
+        Config: testAccAWSS3BucketObjectConfig_acl(rInt, "some_bucket_content", "private"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj1),
+          testAccCheckAWSS3BucketObjectBody(&obj1, "some_bucket_content"),
+          resource.TestCheckResourceAttr(resourceName, "acl", "private"),
+          testAccCheckAWSS3BucketObjectAcl(resourceName, []string{"FULL_CONTROL"}),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_acl(rInt, "some_bucket_content", "public-read"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj2),
+          testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1),
+          testAccCheckAWSS3BucketObjectBody(&obj2, "some_bucket_content"),
+          resource.TestCheckResourceAttr(resourceName, "acl", "public-read"),
+          testAccCheckAWSS3BucketObjectAcl(resourceName, []string{"FULL_CONTROL", "READ"}),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_acl(rInt, "changed_some_bucket_content", "private"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj3),
+          testAccCheckAWSS3BucketObjectVersionIdDiffers(&obj3, &obj2),
+          testAccCheckAWSS3BucketObjectBody(&obj3, "changed_some_bucket_content"),
+          resource.TestCheckResourceAttr(resourceName, "acl", "private"),
+          testAccCheckAWSS3BucketObjectAcl(resourceName, []string{"FULL_CONTROL"}),
+        ),
+      },
+    },
+  })
+}
+
+func TestAccAWSS3BucketObject_storageClass(t *testing.T) {
+  var obj s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
   rInt := acctest.RandInt()
-  err = ioutil.WriteFile(tmpFile.Name(), []byte("initial versioned object state"), 0644)
-  if err != nil {
-    t.Fatal(err)
-  }
-  var originalObj, modifiedObj s3.GetObjectOutput
+
+  resource.ParallelTest(t, resource.TestCase{
+    PreCheck:     func() { testAccPreCheck(t) },
+    Providers:    testAccProviders,
+    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+    Steps: []resource.TestStep{
+      {
+        PreConfig: func() {},
+        Config:    testAccAWSS3BucketObjectConfigContent(rInt, "some_bucket_content"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          resource.TestCheckResourceAttr(resourceName, "storage_class", "STANDARD"),
+          testAccCheckAWSS3BucketObjectStorageClass(resourceName, "STANDARD"),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "REDUCED_REDUNDANCY"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          resource.TestCheckResourceAttr(resourceName, "storage_class", "REDUCED_REDUNDANCY"),
+          testAccCheckAWSS3BucketObjectStorageClass(resourceName, "REDUCED_REDUNDANCY"),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "GLACIER"),
+        Check: resource.ComposeTestCheckFunc(
+          // Can't GetObject on an object in Glacier without restoring it.
+          resource.TestCheckResourceAttr(resourceName, "storage_class", "GLACIER"),
+          testAccCheckAWSS3BucketObjectStorageClass(resourceName, "GLACIER"),
+        ),
+      },
+      {
+        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "INTELLIGENT_TIERING"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj),
+          resource.TestCheckResourceAttr(resourceName, "storage_class", "INTELLIGENT_TIERING"),
+          testAccCheckAWSS3BucketObjectStorageClass(resourceName, "INTELLIGENT_TIERING"),
+        ),
+      },
+    },
+  })
+}
+
+func TestAccAWSS3BucketObject_tags(t *testing.T) {
+  var obj1, obj2, obj3, obj4 s3.GetObjectOutput
+  resourceName := "aws_s3_bucket_object.object"
+  rInt := acctest.RandInt()

   resource.ParallelTest(t, resource.TestCase{
     PreCheck:     func() { testAccPreCheck(t) },
@@ -209,48 +335,76 @@ func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) {
     CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
     Steps: []resource.TestStep{
       {
-        Config: testAccAWSS3BucketObjectConfig_updatesWithVersioning(rInt, tmpFile.Name()),
+        PreConfig: func() {},
+        Config:    testAccAWSS3BucketObjectConfig_withTags(rInt, "stuff"),
         Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &originalObj),
-          resource.TestCheckResourceAttr("aws_s3_bucket_object.object", "etag", "cee4407fa91906284e2a5e5e03e86b1b"),
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj1),
+          testAccCheckAWSS3BucketObjectBody(&obj1, "stuff"),
+          resource.TestCheckResourceAttr(resourceName, "tags.%", "3"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key1", "AAA"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key2", "BBB"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key3", "CCC"),
         ),
       },
       {
-        PreConfig: func() {
-          err = ioutil.WriteFile(tmpFile.Name(), []byte("modified versioned object"), 0644)
-          if err != nil {
-            t.Fatal(err)
-          }
-        },
-        Config: testAccAWSS3BucketObjectConfig_updatesWithVersioning(rInt, tmpFile.Name()),
+        PreConfig: func() {},
+        Config:    testAccAWSS3BucketObjectConfig_withUpdatedTags(rInt, "stuff"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj2),
+          testAccCheckAWSS3BucketObjectVersionIdEquals(&obj2, &obj1),
+          testAccCheckAWSS3BucketObjectBody(&obj2, "stuff"),
+          resource.TestCheckResourceAttr(resourceName, "tags.%", "4"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key2", "BBB"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key3", "XXX"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key4", "DDD"),
+          resource.TestCheckResourceAttr(resourceName, "tags.Key5", "EEE"),
+        ),
+      },
+      {
+        PreConfig: func() {},
+        Config:    testAccAWSS3BucketObjectConfig_withNoTags(rInt, "stuff"),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketObjectExists(resourceName, &obj3),
+          testAccCheckAWSS3BucketObjectVersionIdEquals(&obj3, &obj2),
+          testAccCheckAWSS3BucketObjectBody(&obj3, "stuff"),
+          resource.TestCheckResourceAttr(resourceName, "tags.%", "0"),
+        ),
+      },
+      {
+        PreConfig: func() {},
+        Config:    testAccAWSS3BucketObjectConfig_withTags(rInt, "changed stuff"),
         Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &modifiedObj),
-          resource.TestCheckResourceAttr("aws_s3_bucket_object.object", "etag", "00b8c73b1b50e7cc932362c7225b8e29"),
-          testAccCheckAWSS3BucketObjectVersionIdDiffers(&originalObj, &modifiedObj),
-          testAccCheckResourceAttrMatchesVersionId("data.template_file.object_version", "rendered", &modifiedObj),
&modifiedObj), + testAccCheckAWSS3BucketObjectExists(resourceName, &obj4), + testAccCheckAWSS3BucketObjectVersionIdDiffers(&obj4, &obj3), + testAccCheckAWSS3BucketObjectBody(&obj4, "changed stuff"), + resource.TestCheckResourceAttr(resourceName, "tags.%", "3"), + resource.TestCheckResourceAttr(resourceName, "tags.Key1", "AAA"), + resource.TestCheckResourceAttr(resourceName, "tags.Key2", "BBB"), + resource.TestCheckResourceAttr(resourceName, "tags.Key3", "CCC"), ), }, }, }) } -func testAccCheckResourceAttrMatchesVersionId(resourceName string, attribute string, object *s3.GetObjectOutput) resource.TestCheckFunc { +func testAccCheckAWSS3BucketObjectVersionIdDiffers(first, second *s3.GetObjectOutput) resource.TestCheckFunc { return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[resourceName] - if !ok { - return fmt.Errorf("Not Found: %s", resourceName) + if first.VersionId == nil { + return fmt.Errorf("Expected first object to have VersionId: %s", first) + } + if second.VersionId == nil { + return fmt.Errorf("Expected second object to have VersionId: %s", second) } - attrValue := aws.String(rs.Primary.Attributes[attribute]) - if *attrValue != *object.VersionId { - return fmt.Errorf("Expected Version IDs to be the same, but they were different (%s vs %s)", *attrValue, *object.VersionId) + if *first.VersionId == *second.VersionId { + return fmt.Errorf("Expected Version IDs to differ, but they are equal (%s)", *first.VersionId) } return nil } } -func testAccCheckAWSS3BucketObjectVersionIdDiffers(first, second *s3.GetObjectOutput) resource.TestCheckFunc { +func testAccCheckAWSS3BucketObjectVersionIdEquals(first, second *s3.GetObjectOutput) resource.TestCheckFunc { return func(s *terraform.State) error { if first.VersionId == nil { return fmt.Errorf("Expected first object to have VersionId: %s", first) @@ -259,8 +413,8 @@ func testAccCheckAWSS3BucketObjectVersionIdDiffers(first, second *s3.GetObjectOu return fmt.Errorf("Expected second object to have VersionId: %s", second) } - if *first.VersionId == *second.VersionId { - return fmt.Errorf("Expected Version IDs to differ, but they are equal (%s)", *first.VersionId) + if *first.VersionId != *second.VersionId { + return fmt.Errorf("Expected Version IDs to be equal, but they differ (%s, %s)", *first.VersionId, *second.VersionId) } return nil @@ -316,101 +470,20 @@ func testAccCheckAWSS3BucketObjectExists(n string, obj *s3.GetObjectOutput) reso } } -func TestAccAWSS3BucketObject_kms(t *testing.T) { - rInt := acctest.RandInt() - var obj s3.GetObjectOutput - - resource.ParallelTest(t, resource.TestCase{ - PreCheck: func() { testAccPreCheck(t) }, - Providers: testAccProviders, - CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, - Steps: []resource.TestStep{ - { - PreConfig: func() {}, - Config: testAccAWSS3BucketObjectConfig_withKMSId(rInt), - Check: testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj), - }, - }, - }) -} +func testAccCheckAWSS3BucketObjectBody(obj *s3.GetObjectOutput, want string) resource.TestCheckFunc { + return func(s *terraform.State) error { + body, err := ioutil.ReadAll(obj.Body) + if err != nil { + return fmt.Errorf("failed to read body: %s", err) + } + obj.Body.Close() -func TestAccAWSS3BucketObject_sse(t *testing.T) { - tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj-source-sse") - if err != nil { - t.Fatal(err) - } - defer os.Remove(tmpFile.Name()) + if got := string(body); got != want { + return fmt.Errorf("wrong result body %q; want %q", got, want) + } - // first write some data 
-  err = ioutil.WriteFile(tmpFile.Name(), []byte("{anything will do}"), 0644)
-  if err != nil {
-    t.Fatal(err)
+    return nil
   }
-
-  rInt := acctest.RandInt()
-  var obj s3.GetObjectOutput
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { testAccPreCheck(t) },
-    Providers:    testAccProviders,
-    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
-    Steps: []resource.TestStep{
-      {
-        PreConfig: func() {},
-        Config:    testAccAWSS3BucketObjectConfig_withSSE(rInt, tmpFile.Name()),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object",
-            &obj),
-          testAccCheckAWSS3BucketObjectSSE(
-            "aws_s3_bucket_object.object",
-            "aws:kms"),
-        ),
-      },
-    },
-  })
-}
-
-func TestAccAWSS3BucketObject_acl(t *testing.T) {
-  rInt := acctest.RandInt()
-  var obj s3.GetObjectOutput
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { testAccPreCheck(t) },
-    Providers:    testAccProviders,
-    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
-    Steps: []resource.TestStep{
-      {
-        Config: testAccAWSS3BucketObjectConfig_acl(rInt, "private"),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object", &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "acl",
-            "private"),
-          testAccCheckAWSS3BucketObjectAcl(
-            "aws_s3_bucket_object.object",
-            []string{"FULL_CONTROL"}),
-        ),
-      },
-      {
-        Config: testAccAWSS3BucketObjectConfig_acl(rInt, "public-read"),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object",
-            &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "acl",
-            "public-read"),
-          testAccCheckAWSS3BucketObjectAcl(
-            "aws_s3_bucket_object.object",
-            []string{"FULL_CONTROL", "READ"}),
-        ),
-      },
-    },
-  })
-}
 }

 func testAccCheckAWSS3BucketObjectAcl(n string, expectedPerms []string) resource.TestCheckFunc {
@@ -441,78 +514,6 @@ func testAccCheckAWSS3BucketObjectAcl(n string, expectedPerms []string) resource
   }
 }

-func TestAccAWSS3BucketObject_storageClass(t *testing.T) {
-  rInt := acctest.RandInt()
-  var obj s3.GetObjectOutput
-
-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { testAccPreCheck(t) },
-    Providers:    testAccProviders,
-    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
-    Steps: []resource.TestStep{
-      {
-        PreConfig: func() {},
-        Config:    testAccAWSS3BucketObjectConfigContent(rInt),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object",
-            &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "storage_class",
-            "STANDARD"),
-          testAccCheckAWSS3BucketObjectStorageClass(
-            "aws_s3_bucket_object.object",
-            "STANDARD"),
-        ),
-      },
-      {
-        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "REDUCED_REDUNDANCY"),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object",
-            &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "storage_class",
-            "REDUCED_REDUNDANCY"),
-          testAccCheckAWSS3BucketObjectStorageClass(
-            "aws_s3_bucket_object.object",
-            "REDUCED_REDUNDANCY"),
-        ),
-      },
-      {
-        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "GLACIER"),
-        Check: resource.ComposeTestCheckFunc(
-          // Can't GetObject on an object in Glacier without restoring it.
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "storage_class",
-            "GLACIER"),
-          testAccCheckAWSS3BucketObjectStorageClass(
-            "aws_s3_bucket_object.object",
-            "GLACIER"),
-        ),
-      },
-      {
-        Config: testAccAWSS3BucketObjectConfig_storageClass(rInt, "INTELLIGENT_TIERING"),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists(
-            "aws_s3_bucket_object.object",
-            &obj),
-          resource.TestCheckResourceAttr(
-            "aws_s3_bucket_object.object",
-            "storage_class",
-            "INTELLIGENT_TIERING"),
-          testAccCheckAWSS3BucketObjectStorageClass(
-            "aws_s3_bucket_object.object",
-            "INTELLIGENT_TIERING"),
-        ),
-      },
-    },
-  })
-}
-
 func testAccCheckAWSS3BucketObjectStorageClass(n, expectedClass string) resource.TestCheckFunc {
   return func(s *terraform.State) error {
     rs := s.RootModule().Resources[n]
@@ -571,200 +572,223 @@ func testAccCheckAWSS3BucketObjectSSE(n, expectedSSE string) resource.TestCheckF
   }
 }

-func TestAccAWSS3BucketObject_tags(t *testing.T) {
-  rInt := acctest.RandInt()
-  var obj s3.GetObjectOutput
+func testAccAWSS3BucketObjectCreateTempFile(t *testing.T, data string) string {
+  tmpFile, err := ioutil.TempFile("", "tf-acc-s3-obj")
+  if err != nil {
+    t.Fatal(err)
+  }
+  filename := tmpFile.Name()

-  resource.ParallelTest(t, resource.TestCase{
-    PreCheck:     func() { testAccPreCheck(t) },
-    Providers:    testAccProviders,
-    CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
-    Steps: []resource.TestStep{
-      {
-        PreConfig: func() {},
-        Config:    testAccAWSS3BucketObjectConfig_withTags(rInt),
-        Check: resource.ComposeTestCheckFunc(
-          testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object", &obj),
-          resource.TestCheckResourceAttr("aws_s3_bucket_object.object", "tags.%", "2"),
-        ),
-      },
-    },
-  })
+  err = ioutil.WriteFile(filename, []byte(data), 0644)
+  if err != nil {
+    os.Remove(filename)
+    t.Fatal(err)
+  }
+
+  return filename
 }

 func testAccAWSS3BucketObjectConfigSource(randInt int, source string) string {
   return fmt.Sprintf(`
 resource "aws_s3_bucket" "object_bucket" {
-    bucket = "tf-object-test-bucket-%d"
+  bucket = "tf-object-test-bucket-%d"
 }
+
 resource "aws_s3_bucket_object" "object" {
-    bucket = "${aws_s3_bucket.object_bucket.bucket}"
-    key = "test-key"
-    source = "%s"
-    content_type = "binary/octet-stream"
+  bucket       = "${aws_s3_bucket.object_bucket.bucket}"
+  key          = "test-key"
+  source       = "%s"
+  content_type = "binary/octet-stream"
 }
 `, randInt, source)
 }

 func testAccAWSS3BucketObjectConfig_withContentCharacteristics(randInt int, source string) string {
   return fmt.Sprintf(`
-resource "aws_s3_bucket" "object_bucket_2" {
-    bucket = "tf-object-test-bucket-%d"
+resource "aws_s3_bucket" "object_bucket" {
+  bucket = "tf-object-test-bucket-%d"
 }

 resource "aws_s3_bucket_object" "object" {
-    bucket = "${aws_s3_bucket.object_bucket_2.bucket}"
-    key = "test-key"
-    source = "%s"
-    content_language = "en"
-    content_type = "binary/octet-stream"
-    website_redirect = "http://google.com"
+  bucket           = "${aws_s3_bucket.object_bucket.bucket}"
+  key              = "test-key"
+  source           = "%s"
+  content_language = "en"
+  content_type     = "binary/octet-stream"
+  website_redirect = "http://google.com"
 }
 `, randInt, source)
 }

-func testAccAWSS3BucketObjectConfigContent(randInt int) string {
+func testAccAWSS3BucketObjectConfigContent(randInt int, content string) string {
   return fmt.Sprintf(`
 resource "aws_s3_bucket" "object_bucket" {
-    bucket = "tf-object-test-bucket-%d"
+  bucket = "tf-object-test-bucket-%d"
 }
+
 resource "aws_s3_bucket_object" "object" {
-    bucket = "${aws_s3_bucket.object_bucket.bucket}"
-    key = "test-key"
key = "test-key" - content = "some_bucket_content" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "%s" } -`, randInt) +`, randInt, content) } -func testAccAWSS3BucketObjectConfigContentBase64(randInt int) string { +func testAccAWSS3BucketObjectConfigContentBase64(randInt int, contentBase64 string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" + bucket = "tf-object-test-bucket-%d" } + resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket.bucket}" - key = "test-key" - content_base64 = "c29tZV9idWNrZXRfY29udGVudA==" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content_base64 = "%s" } -`, randInt) +`, randInt, contentBase64) } -func testAccAWSS3BucketObjectConfig_updates(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_updateable(randInt int, bucketVersioning bool, source string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket_3" { - bucket = "tf-object-test-bucket-%d" + bucket = "tf-object-test-bucket-%d" + versioning { + enabled = %t + } } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket_3.bucket}" - key = "updateable-key" - source = "%s" - etag = "${md5(file("%s"))}" + bucket = "${aws_s3_bucket.object_bucket_3.bucket}" + key = "updateable-key" + source = "%s" + etag = "${md5(file("%s"))}" } -`, randInt, source, source) +`, randInt, bucketVersioning, source, source) } -func testAccAWSS3BucketObjectConfig_updatesWithVersioning(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_withKMSId(randInt int, source string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket_3" { - bucket = "tf-object-test-bucket-%d" - versioning { - enabled = true - } +resource "aws_kms_key" "kms_key_1" {} + +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket_3.bucket}" - key = "updateable-key" - source = "%s" - etag = "${md5(file("%s"))}" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + source = "%s" + kms_key_id = "${aws_kms_key.kms_key_1.arn}" +} +`, randInt, source) } -data "template_file" "object_version" { - template = "$${object_version}" +func testAccAWSS3BucketObjectConfig_withSSE(randInt int, source string) string { + return fmt.Sprintf(` +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" +} - vars { - object_version = "${aws_s3_bucket_object.object.version_id}" - } +resource "aws_s3_bucket_object" "object" { + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + source = "%s" + server_side_encryption = "AES256" } -`, randInt, source, source) +`, randInt, source) } -func testAccAWSS3BucketObjectConfig_withKMSId(randInt int) string { +func testAccAWSS3BucketObjectConfig_acl(randInt int, content, acl string) string { return fmt.Sprintf(` -resource "aws_kms_key" "kms_key_1" { -} - -resource "aws_s3_bucket" "object_bucket_2" { - bucket = "tf-object-test-bucket-%d" +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" + versioning { + enabled = true + } } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket_2.bucket}" - key = "test-key" - content = "stuff" - kms_key_id = "${aws_kms_key.kms_key_1.arn}" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "%s" + acl 
= "%s" } -`, randInt) +`, randInt, content, acl) } -func testAccAWSS3BucketObjectConfig_withSSE(randInt int, source string) string { +func testAccAWSS3BucketObjectConfig_storageClass(randInt int, storage_class string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" + bucket = "tf-object-test-bucket-%d" } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket.bucket}" - key = "test-key" - source = "%s" - server_side_encryption = "aws:kms" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "some_bucket_content" + storage_class = "%s" } -`, randInt, source) +`, randInt, storage_class) } -func testAccAWSS3BucketObjectConfig_acl(randInt int, acl string) string { +func testAccAWSS3BucketObjectConfig_withTags(randInt int, content string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" + bucket = "tf-object-test-bucket-%d" + versioning { + enabled = true + } } + resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket.bucket}" - key = "test-key" - content = "some_bucket_content" - acl = "%s" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "%s" + tags = { + Key1 = "AAA" + Key2 = "BBB" + Key3 = "CCC" + } } -`, randInt, acl) +`, randInt, content) } -func testAccAWSS3BucketObjectConfig_storageClass(randInt int, storage_class string) string { +func testAccAWSS3BucketObjectConfig_withUpdatedTags(randInt int, content string) string { return fmt.Sprintf(` resource "aws_s3_bucket" "object_bucket" { - bucket = "tf-object-test-bucket-%d" + bucket = "tf-object-test-bucket-%d" + versioning { + enabled = true + } } + resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket.bucket}" - key = "test-key" - content = "some_bucket_content" - storage_class = "%s" + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "%s" + tags = { + Key2 = "BBB" + Key3 = "XXX" + Key4 = "DDD" + Key5 = "EEE" + } } -`, randInt, storage_class) +`, randInt, content) } -func testAccAWSS3BucketObjectConfig_withTags(randInt int) string { +func testAccAWSS3BucketObjectConfig_withNoTags(randInt int, content string) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "object_bucket_2" { - bucket = "tf-object-test-bucket-%d" +resource "aws_s3_bucket" "object_bucket" { + bucket = "tf-object-test-bucket-%d" + versioning { + enabled = true + } } resource "aws_s3_bucket_object" "object" { - bucket = "${aws_s3_bucket.object_bucket_2.bucket}" - key = "test-key" - content = "stuff" - tags = { - Key1 = "Value One" - Description = "Very interesting" - } + bucket = "${aws_s3_bucket.object_bucket.bucket}" + key = "test-key" + content = "%s" } -`, randInt) +`, randInt, content) } diff --git a/aws/s3_tags.go b/aws/s3_tags.go index 57efcc8f4549..42fc6b8c0e3f 100644 --- a/aws/s3_tags.go +++ b/aws/s3_tags.go @@ -52,6 +52,57 @@ func setTagsS3(conn *s3.S3, d *schema.ResourceData) error { return nil } +func getTagsS3Object(conn *s3.S3, d *schema.ResourceData) error { + resp, err := retryOnAwsCode(s3.ErrCodeNoSuchKey, func() (interface{}, error) { + return conn.GetObjectTagging(&s3.GetObjectTaggingInput{ + Bucket: aws.String(d.Get("bucket").(string)), + Key: aws.String(d.Get("key").(string)), + }) + }) + if err != nil { + return err + } + + if err := d.Set("tags", tagsToMapS3(resp.(*s3.GetObjectTaggingOutput).TagSet)); err != nil { + return err + } + + return 
+}
+
+func setTagsS3Object(conn *s3.S3, d *schema.ResourceData) error {
+  if d.HasChange("tags") {
+    oraw, nraw := d.GetChange("tags")
+    o := oraw.(map[string]interface{})
+    n := nraw.(map[string]interface{})
+
+    // Set tags
+    if len(o) > 0 {
+      _, err := conn.DeleteObjectTagging(&s3.DeleteObjectTaggingInput{
+        Bucket: aws.String(d.Get("bucket").(string)),
+        Key:    aws.String(d.Get("key").(string)),
+      })
+      if err != nil {
+        return err
+      }
+    }
+    if len(n) > 0 {
+      _, err := conn.PutObjectTagging(&s3.PutObjectTaggingInput{
+        Bucket: aws.String(d.Get("bucket").(string)),
+        Key:    aws.String(d.Get("key").(string)),
+        Tagging: &s3.Tagging{
+          TagSet: tagsFromMapS3(n),
+        },
+      })
+      if err != nil {
+        return err
+      }
+    }
+  }
+
+  return nil
+}
+
 // diffTags takes our tags locally and the ones remotely and returns
 // the set of tags that must be created, and the set of tags that must
 // be destroyed.
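A minimal standalone sketch (not part of the patch) of the delete-then-put tagging sequence that the new setTagsS3Object helper performs on an updated object. It uses only aws-sdk-go calls already shown in the diff; the bucket name, object key, and tag values below are hypothetical placeholders.

package main

import (
  "log"

  "github.com/aws/aws-sdk-go/aws"
  "github.com/aws/aws-sdk-go/aws/session"
  "github.com/aws/aws-sdk-go/service/s3"
)

func main() {
  // Hypothetical bucket and key, used purely for illustration.
  bucket, key := "example-bucket", "example-key"

  conn := s3.New(session.Must(session.NewSession()))

  // Drop the existing tag set first, mirroring the len(o) > 0 branch.
  if _, err := conn.DeleteObjectTagging(&s3.DeleteObjectTaggingInput{
    Bucket: aws.String(bucket),
    Key:    aws.String(key),
  }); err != nil {
    log.Fatalf("error deleting S3 object tags: %s", err)
  }

  // Then put the replacement tag set, mirroring the len(n) > 0 branch.
  if _, err := conn.PutObjectTagging(&s3.PutObjectTaggingInput{
    Bucket: aws.String(bucket),
    Key:    aws.String(key),
    Tagging: &s3.Tagging{
      TagSet: []*s3.Tag{
        {Key: aws.String("Key1"), Value: aws.String("AAA")},
      },
    },
  }); err != nil {
    log.Fatalf("error putting S3 object tags: %s", err)
  }
}

Because tagging is handled in resourceAwsS3BucketObjectUpdate rather than through a new PutObject, a tags-only change does not create a new object version; the acceptance tests above assert this with testAccCheckAWSS3BucketObjectVersionIdEquals.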