diff --git a/builtin/providers/aws/provider.go b/builtin/providers/aws/provider.go
index fc9a55874299..00de87f9d92a 100644
--- a/builtin/providers/aws/provider.go
+++ b/builtin/providers/aws/provider.go
@@ -208,6 +208,7 @@ func Provider() terraform.ResourceProvider {
 			"aws_route_table":             resourceAwsRouteTable(),
 			"aws_route_table_association": resourceAwsRouteTableAssociation(),
 			"aws_s3_bucket":               resourceAwsS3Bucket(),
+			"aws_s3_bucket_object":        resourceAwsS3BucketObject(),
 			"aws_security_group":          resourceAwsSecurityGroup(),
 			"aws_security_group_rule":     resourceAwsSecurityGroupRule(),
 			"aws_spot_instance_request":   resourceAwsSpotInstanceRequest(),
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go
new file mode 100644
index 000000000000..9d46952d0747
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -0,0 +1,122 @@
+package aws
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/hashicorp/terraform/helper/schema"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/awserr"
+	"github.com/aws/aws-sdk-go/service/s3"
+)
+
+func resourceAwsS3BucketObject() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceAwsS3BucketObjectPut,
+		Read:   resourceAwsS3BucketObjectRead,
+		Update: resourceAwsS3BucketObjectPut,
+		Delete: resourceAwsS3BucketObjectDelete,
+
+		Schema: map[string]*schema.Schema{
+			"bucket": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"key": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"source": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"etag": &schema.Schema{
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+		},
+	}
+}
+
+func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+	source := d.Get("source").(string)
+
+	file, err := os.Open(source)
+
+	if err != nil {
+		return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+	}
+	defer file.Close()
+
+	resp, err := s3conn.PutObject(
+		&s3.PutObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+			Body:   file,
+		})
+
+	if err != nil {
+		return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
+	}
+
+	d.Set("etag", resp.ETag)
+	d.SetId(key)
+	return nil
+}
+
+func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+	etag := d.Get("etag").(string)
+
+	resp, err := s3conn.HeadObject(
+		&s3.HeadObjectInput{
+			Bucket:  aws.String(bucket),
+			Key:     aws.String(key),
+			IfMatch: aws.String(etag),
+		})
+
+	if err != nil {
+		// If S3 returns a 404 Request Failure, mark the object as destroyed
+		if awsErr, ok := err.(awserr.RequestFailure); ok && awsErr.StatusCode() == 404 {
+			d.SetId("")
+			log.Printf("[WARN] Error Reading Object (%s), object not found (HTTP status 404)", key)
+			return nil
+		}
+		return err
+	}
+
+	log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp)
+	return nil
+}
+
+func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+
+	_, err := s3conn.DeleteObject(
+		&s3.DeleteObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+		})
+	if err != nil {
+		return fmt.Errorf("Error deleting S3 bucket object: %s", err)
+	}
+	return nil
+}
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
new file mode 100644
index 000000000000..4f947736aeb5
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -0,0 +1,99 @@
+package aws
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/service/s3"
+)
+
+var tf, err = ioutil.TempFile("", "tf")
+
+func TestAccAWSS3BucketObject_basic(t *testing.T) {
+	// first write some data to the tempfile just so it's not 0 bytes.
+	ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() {
+			if err != nil {
+				panic(err)
+			}
+			testAccPreCheck(t)
+		},
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+		Steps: []resource.TestStep{
+			resource.TestStep{
+				Config: testAccAWSS3BucketObjectConfig,
+				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+			},
+		},
+	})
+}
+
+func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
+	s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "aws_s3_bucket_object" {
+			continue
+		}
+
+		_, err := s3conn.HeadObject(
+			&s3.HeadObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.Attributes["etag"]),
+			})
+		if err == nil {
+			return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID)
+		}
+	}
+	return nil
+}
+
+func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+
+		defer os.Remove(tf.Name())
+
+		rs, ok := s.RootModule().Resources[n]
+		if !ok {
+			return fmt.Errorf("Not Found: %s", n)
+		}
+
+		if rs.Primary.ID == "" {
+			return fmt.Errorf("No S3 Bucket Object ID is set")
+		}
+
+		s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+		_, err := s3conn.GetObject(
+			&s3.GetObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.Attributes["etag"]),
+			})
+		if err != nil {
+			return fmt.Errorf("S3Bucket Object error: %s", err)
+		}
+		return nil
+	}
+}
+
+var randomBucket = randInt
+var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket" {
+	bucket = "tf-object-test-bucket-%d"
+}
+
+resource "aws_s3_bucket_object" "object" {
+	bucket = "${aws_s3_bucket.object_bucket.bucket}"
+	key = "test-key"
+	source = "%s"
+}
+`, randomBucket, tf.Name())
diff --git a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
new file mode 100644
index 000000000000..63d201b8268f
--- /dev/null
+++ b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
@@ -0,0 +1,39 @@
+---
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+sidebar_current: "docs-aws-resource-s3-bucket-object"
+description: |-
+  Provides an S3 bucket object resource.
+---
+
+# aws\_s3\_bucket\_object
+
+Provides an S3 bucket object resource.
+
+## Example Usage
+
+### Uploading a file to a bucket
+
+```
+resource "aws_s3_bucket_object" "object" {
+  bucket = "your_bucket_name"
+  key = "new_object_key"
+  source = "path/to/file"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `bucket` - (Required) The name of the bucket to put the file in.
+* `key` - (Required) The name of the object once it is in the bucket.
+* `source` - (Required) The path to the source file being uploaded to the bucket.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - the `key` of the resource supplied above
+* `etag` - the ETag generated for the object. This is often the MD5 hash of the
+object, unless you specify your own encryption keys
diff --git a/website/source/layouts/aws.erb b/website/source/layouts/aws.erb
index a212bb6977d7..2bbff22f4b9b 100644
--- a/website/source/layouts/aws.erb
+++ b/website/source/layouts/aws.erb
@@ -279,6 +279,10 @@ aws_s3_bucket
+
+