-
Notifications
You must be signed in to change notification settings - Fork 9.7k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
provider/aws: Add S3 Bucket Object (supercedes #2079) #2898
Merged
Merged
Changes from all commits
Commits
Show all changes
7 commits
Select commit
Hold shift + click to select a range
9a8625e
added new provider for creating objects in an s3 bucket
m-s-austin 3eceddc
added documentation
m-s-austin 5c6083e
Merge branch 'master' into f-aws-s3-object-pr-2079
catsby f6bad31
update docs
catsby 44f4705
provider/aws: Add S3 Bucket Object (supercedes #2079)
catsby 285b406
Merge remote-tracking branch 'upstream/master' into f-aws-s3-object-p…
catsby bfaea76
more tightly scope s3 bucket object error
catsby File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,121 @@ | ||
package aws | ||
|
||
import ( | ||
"fmt" | ||
"log" | ||
"os" | ||
|
||
"github.com/hashicorp/terraform/helper/schema" | ||
|
||
"github.com/aws/aws-sdk-go/aws" | ||
"github.com/aws/aws-sdk-go/aws/awserr" | ||
"github.com/aws/aws-sdk-go/service/s3" | ||
) | ||
|
||
func resourceAwsS3BucketObject() *schema.Resource { | ||
return &schema.Resource{ | ||
Create: resourceAwsS3BucketObjectPut, | ||
Read: resourceAwsS3BucketObjectRead, | ||
Update: resourceAwsS3BucketObjectPut, | ||
Delete: resourceAwsS3BucketObjectDelete, | ||
|
||
Schema: map[string]*schema.Schema{ | ||
"bucket": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Required: true, | ||
ForceNew: true, | ||
}, | ||
|
||
"key": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Required: true, | ||
ForceNew: true, | ||
}, | ||
|
||
"source": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Required: true, | ||
ForceNew: true, | ||
}, | ||
|
||
"etag": &schema.Schema{ | ||
Type: schema.TypeString, | ||
Computed: true, | ||
}, | ||
}, | ||
} | ||
} | ||
|
||
func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error { | ||
s3conn := meta.(*AWSClient).s3conn | ||
|
||
bucket := d.Get("bucket").(string) | ||
key := d.Get("key").(string) | ||
source := d.Get("source").(string) | ||
|
||
file, err := os.Open(source) | ||
|
||
if err != nil { | ||
return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err) | ||
} | ||
|
||
resp, err := s3conn.PutObject( | ||
&s3.PutObjectInput{ | ||
Bucket: aws.String(bucket), | ||
Key: aws.String(key), | ||
Body: file, | ||
}) | ||
|
||
if err != nil { | ||
return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err) | ||
} | ||
|
||
d.Set("etag", resp.ETag) | ||
d.SetId(key) | ||
return nil | ||
} | ||
|
||
func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error { | ||
s3conn := meta.(*AWSClient).s3conn | ||
|
||
bucket := d.Get("bucket").(string) | ||
key := d.Get("key").(string) | ||
etag := d.Get("etag").(string) | ||
|
||
resp, err := s3conn.HeadObject( | ||
&s3.HeadObjectInput{ | ||
Bucket: aws.String(bucket), | ||
Key: aws.String(key), | ||
IfMatch: aws.String(etag), | ||
}) | ||
|
||
if err != nil { | ||
// If S3 returns a 404 Request Failure, mark the object as destroyed | ||
if awsErr, ok := err.(awserr.RequestFailure); ok && awsErr.StatusCode() == 404 { | ||
d.SetId("") | ||
log.Printf("[WARN] Error Reading Object (%s), object not found (HTTP status 404)", key) | ||
return nil | ||
} | ||
return err | ||
} | ||
|
||
log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp) | ||
return nil | ||
} | ||
|
||
func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error { | ||
s3conn := meta.(*AWSClient).s3conn | ||
|
||
bucket := d.Get("bucket").(string) | ||
key := d.Get("key").(string) | ||
|
||
_, err := s3conn.DeleteObject( | ||
&s3.DeleteObjectInput{ | ||
Bucket: aws.String(bucket), | ||
Key: aws.String(key), | ||
}) | ||
if err != nil { | ||
return fmt.Errorf("Error deleting S3 bucket object: %s", err) | ||
} | ||
return nil | ||
} |
99 changes: 99 additions & 0 deletions
99
builtin/providers/aws/resource_aws_s3_bucket_object_test.go
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
package aws | ||
|
||
import ( | ||
"fmt" | ||
"io/ioutil" | ||
"os" | ||
"testing" | ||
|
||
"github.com/hashicorp/terraform/helper/resource" | ||
"github.com/hashicorp/terraform/terraform" | ||
|
||
"github.com/aws/aws-sdk-go/aws" | ||
"github.com/aws/aws-sdk-go/service/s3" | ||
) | ||
|
||
// tf is a shared temp file used as the upload source for the acceptance
// test; err is inspected later in the test's PreCheck.
// NOTE(review): this runs at package init for every test binary
// invocation, even when this test is filtered out — consider creating
// the temp file inside the test instead.
var tf, err = ioutil.TempFile("", "tf")
||
func TestAccAWSS3BucketObject_basic(t *testing.T) { | ||
// first write some data to the tempfile just so it's not 0 bytes. | ||
ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644) | ||
resource.Test(t, resource.TestCase{ | ||
PreCheck: func() { | ||
if err != nil { | ||
panic(err) | ||
} | ||
testAccPreCheck(t) | ||
}, | ||
Providers: testAccProviders, | ||
CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, | ||
Steps: []resource.TestStep{ | ||
resource.TestStep{ | ||
Config: testAccAWSS3BucketObjectConfig, | ||
Check: testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"), | ||
}, | ||
}, | ||
}) | ||
} | ||
|
||
func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error { | ||
s3conn := testAccProvider.Meta().(*AWSClient).s3conn | ||
|
||
for _, rs := range s.RootModule().Resources { | ||
if rs.Type != "aws_s3_bucket_object" { | ||
continue | ||
} | ||
|
||
_, err := s3conn.HeadObject( | ||
&s3.HeadObjectInput{ | ||
Bucket: aws.String(rs.Primary.Attributes["bucket"]), | ||
Key: aws.String(rs.Primary.Attributes["key"]), | ||
IfMatch: aws.String(rs.Primary.Attributes["etag"]), | ||
}) | ||
if err == nil { | ||
return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID) | ||
} | ||
} | ||
return nil | ||
} | ||
|
||
func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc { | ||
return func(s *terraform.State) error { | ||
|
||
defer os.Remove(tf.Name()) | ||
|
||
rs, ok := s.RootModule().Resources[n] | ||
if !ok { | ||
return fmt.Errorf("Not Found: %s", n) | ||
} | ||
|
||
if rs.Primary.ID == "" { | ||
return fmt.Errorf("No S3 Bucket Object ID is set") | ||
} | ||
|
||
s3conn := testAccProvider.Meta().(*AWSClient).s3conn | ||
_, err := s3conn.GetObject( | ||
&s3.GetObjectInput{ | ||
Bucket: aws.String(rs.Primary.Attributes["bucket"]), | ||
Key: aws.String(rs.Primary.Attributes["key"]), | ||
IfMatch: aws.String(rs.Primary.Attributes["etag"]), | ||
}) | ||
if err != nil { | ||
return fmt.Errorf("S3Bucket Object error: %s", err) | ||
} | ||
return nil | ||
} | ||
} | ||
|
||
// randomBucket seeds the bucket name so concurrent test runs don't collide.
var randomBucket = randInt

// testAccAWSS3BucketObjectConfig provisions a bucket plus an object sourced
// from the package-level temp file.
// NOTE(review): this is evaluated at package init and calls tf.Name(), so it
// depends on the TempFile var above having succeeded — confirm init ordering.
var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket" {
bucket = "tf-object-test-bucket-%d"
}

resource "aws_s3_bucket_object" "object" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "test-key"
source = "%s"
}
`, randomBucket, tf.Name())
39 changes: 39 additions & 0 deletions
39
website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
--- | ||
layout: "aws" | ||
page_title: "AWS: aws_s3_bucket_object" | ||
sidebar_current: "docs-aws-resource-s3-bucket-object" | ||
description: |- | ||
Provides an S3 bucket object resource. | ||
--- | ||
|
||
# aws\_s3\_bucket\_object | ||
|
||
Provides an S3 bucket object resource. | ||
|
||
## Example Usage | ||
|
||
### Uploading a file to a bucket | ||
|
||
``` | ||
resource "aws_s3_bucket_object" "object" { | ||
bucket = "your_bucket_name" | ||
key = "new_object_key" | ||
source = "path/to/file" | ||
} | ||
``` | ||
|
||
## Argument Reference | ||
|
||
The following arguments are supported: | ||
|
||
* `bucket` - (Required) The name of the bucket to put the file in. | ||
* `key` - (Required) The name of the object once it is in the bucket. | ||
* `source` - (Required) The path to the source file being uploaded to the bucket. | ||
|
||
## Attributes Reference | ||
|
||
The following attributes are exported: | ||
|
||
* `id` - the `key` of the resource supplied above | ||
* `etag` - the ETag generated for the object. This is often the MD5 hash of the | ||
object, unless you specify your own encryption keys. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This seems perhaps a little too wide of a net for the error. Like if I have a wifi hiccup I don't want TF to assume objects are gone. Can we scope this down to an "Object Not Found" error and return anything else?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I just tried to scope this, but with no luck:
Response:
😱
@phinze I did check
GetObject
(instead ofHeadObject
), which does returnNoSuchKey
for the code. I'm not sure if we should switch though, because of the included network traffic for potentially larger objects. Thoughts?There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@catsby from that output it looks like something like this would work for a scope:
Similar to how we do it for S3 Buckets.
err.(awserr.RequestFailure)
might work too, which would make us able to use literally the same snippet.Let me know what you think.