From 9a8625e6a69a9c936ba658465130662802c1f506 Mon Sep 17 00:00:00 2001
From: Michael Austin
Date: Tue, 26 May 2015 10:44:02 -0400
Subject: [PATCH 1/2] added new provider for creating objects in an s3 bucket

---
 builtin/providers/aws/provider.go             |   1 +
 .../aws/resource_aws_s3_bucket_object.go      | 112 ++++++++++++++++++
 .../aws/resource_aws_s3_bucket_object_test.go |  98 +++++++++++++++
 3 files changed, 211 insertions(+)
 create mode 100644 builtin/providers/aws/resource_aws_s3_bucket_object.go
 create mode 100644 builtin/providers/aws/resource_aws_s3_bucket_object_test.go

diff --git a/builtin/providers/aws/provider.go b/builtin/providers/aws/provider.go
index db90549d2f0d..07e16282f8da 100644
--- a/builtin/providers/aws/provider.go
+++ b/builtin/providers/aws/provider.go
@@ -120,6 +120,7 @@ func Provider() terraform.ResourceProvider {
 			"aws_route_table_association": resourceAwsRouteTableAssociation(),
 			"aws_route_table":             resourceAwsRouteTable(),
 			"aws_s3_bucket":               resourceAwsS3Bucket(),
+			"aws_s3_bucket_object":        resourceAwsS3BucketObject(),
 			"aws_security_group":          resourceAwsSecurityGroup(),
 			"aws_security_group_rule":     resourceAwsSecurityGroupRule(),
 			"aws_sqs_queue":               resourceAwsSqsQueue(),
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go
new file mode 100644
index 000000000000..74eb558c9e58
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -0,0 +1,112 @@
+package aws
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/hashicorp/terraform/helper/schema"
+
+	"github.com/awslabs/aws-sdk-go/aws"
+	"github.com/awslabs/aws-sdk-go/aws/awsutil"
+	"github.com/awslabs/aws-sdk-go/service/s3"
+)
+
+// resourceAwsS3BucketObject implements the aws_s3_bucket_object resource:
+// it uploads a local file ("source") into an existing S3 bucket under "key".
+// All three arguments force re-creation of the object when changed.
+func resourceAwsS3BucketObject() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceAwsS3BucketObjectPut,
+		Read:   resourceAwsS3BucketObjectRead,
+		Update: resourceAwsS3BucketObjectPut,
+		Delete: resourceAwsS3BucketObjectDelete,
+
+		Schema: map[string]*schema.Schema{
+			"bucket": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"key": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"source": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+		},
+	}
+}
+
+// resourceAwsS3BucketObjectPut uploads the source file to the bucket and
+// records the object's ETag as the resource ID.
+func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+	source := d.Get("source").(string)
+
+	file, err := os.Open(source)
+
+	if err != nil {
+		d.SetId("")
+		return fmt.Errorf("Error opening S3 bucket object source(%s): %s", source, err)
+	}
+	// FIX: close the source file when the upload finishes; the original
+	// version leaked this file handle.
+	defer file.Close()
+
+	resp, err := s3conn.PutObject(
+		&s3.PutObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+			Body:   file,
+		})
+
+	if err != nil {
+		d.SetId("")
+		return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
+	}
+
+	d.SetId(*resp.ETag)
+	return nil
+}
+
+// resourceAwsS3BucketObjectRead issues a HEAD request with If-Match on the
+// stored ETag; on any error the object is assumed gone and dropped from state.
+func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+
+	resp, err := s3conn.HeadObject(
+		&s3.HeadObjectInput{
+			Bucket:  aws.String(bucket),
+			Key:     aws.String(key),
+			IfMatch: aws.String(d.Id()),
+		})
+
+	if err != nil {
+		// if there is an error reading the object we assume it's not there.
+		d.SetId("")
+		log.Printf("Error Reading Object (%s): %s", key, err)
+		// FIX: return here instead of falling through; on error resp may be
+		// nil and there is nothing useful left to do in this read.
+		return nil
+	}
+
+	// FIX: never pass dynamic text as the Printf format string — any '%'
+	// in the response dump would be misinterpreted as a format verb.
+	log.Printf("%s", awsutil.StringValue(resp))
+	return nil
+}
+
+// resourceAwsS3BucketObjectDelete removes the object from the bucket.
+func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+
+	_, err := s3conn.DeleteObject(
+		&s3.DeleteObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+		})
+	if err != nil {
+		return fmt.Errorf("Error deleting S3 bucket object: %s", err)
+	}
+	return nil
+}
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
new file mode 100644
index 000000000000..a06232df7740
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -0,0 +1,98 @@
+package aws
+
+import (
+	"fmt"
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+	"io/ioutil"
+	"os"
+	"testing"
+
+	"github.com/awslabs/aws-sdk-go/aws"
+	"github.com/awslabs/aws-sdk-go/service/s3"
+)
+
+// NOTE(review): package-level mutable state; err is only checked later in
+// the test's PreCheck. Consider creating the temp file inside the test.
+var tf, err = ioutil.TempFile("", "tf")
+
+func TestAccAWSS3BucketObject_basic(t *testing.T) {
+	// first write some data to the tempfile just so it's not 0 bytes.
+	// FIX: check the error instead of silently ignoring a failed write.
+	if err := ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644); err != nil {
+		t.Fatal(err)
+	}
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() {
+			if err != nil {
+				panic(err)
+			}
+			testAccPreCheck(t)
+		},
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+		Steps: []resource.TestStep{
+			resource.TestStep{
+				Config: testAccAWSS3BucketObjectConfig,
+				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+			},
+		},
+	})
+}
+
+// testAccCheckAWSS3BucketObjectDestroy verifies every aws_s3_bucket_object
+// in state is gone: a successful HEAD means the object still exists.
+func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
+	s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "aws_s3_bucket_object" {
+			continue
+		}
+
+		_, err := s3conn.HeadObject(
+			&s3.HeadObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.ID),
+			})
+		if err == nil {
+			return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID)
+		}
+	}
+	return nil
+}
+
+// testAccCheckAWSS3BucketObjectExists fetches the object named n from state
+// and confirms it can be retrieved from S3 with the recorded ETag.
+func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+
+		defer os.Remove(tf.Name())
+
+		rs, ok := s.RootModule().Resources[n]
+		if !ok {
+			return fmt.Errorf("Not Found: %s", n)
+		}
+
+		if rs.Primary.ID == "" {
+			return fmt.Errorf("No S3 Bucket Object ID is set")
+		}
+
+		s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+		_, err := s3conn.GetObject(
+			&s3.GetObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.ID),
+			})
+		if err != nil {
+			return fmt.Errorf("S3Bucket Object error: %s", err)
+		}
+		return nil
+	}
+}
+
+var randomBucket = randInt
+var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket" {
+	bucket = "tf-object-test-bucket-%d"
+}
+resource "aws_s3_bucket_object" "object" {
+	depends_on = "aws_s3_bucket.object_bucket"
+	bucket = "tf-object-test-bucket-%d"
+	key = "test-key"
+	source = "%s"
+}
+`, randomBucket, randomBucket, tf.Name())
From 3eceddc34dd158056eba7c7b316cfdb7b276a41a Mon Sep 17 00:00:00 2001
From: Michael Austin
Date: Tue, 26 May 2015 11:25:03 -0400
Subject: [PATCH 2/2] added documentation

---
 .../aws/r/s3_bucket_object.html.markdown      | 36 +++++++++++++++++++
 1 file changed, 36 insertions(+)
 create mode 100644 website/source/docs/providers/aws/r/s3_bucket_object.html.markdown

diff --git a/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
new file mode 100644
index 000000000000..3f55a1d45342
--- /dev/null
+++ b/website/source/docs/providers/aws/r/s3_bucket_object.html.markdown
@@ -0,0 +1,36 @@
+---
+layout: "aws"
+page_title: "AWS: aws_s3_bucket_object"
+sidebar_current: "docs-aws-resource-s3-bucket-object"
+description: |-
+  Provides an S3 bucket object resource.
+---
+
+# aws\_s3\_bucket\_object
+
+Provides an S3 bucket object resource.
+
+## Example Usage
+
+### Uploading a file to a bucket
+
+```
+resource "aws_s3_bucket_object" "object" {
+  bucket = "your_bucket_name"
+  key = "new_object_key"
+  source = "path/to/file"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `bucket` - (Required) The name of the bucket to put the file in.
+* `key` - (Required) The name of the object once it is in the bucket.
+* `source` - (Required) The path to the source file being uploaded to the bucket.
+
+## Attributes Reference
+
+The following attributes are exported:
+
+* `id` - The `id` of the resource corresponds to the ETag of the bucket object on AWS.