diff --git a/aws/data_source_aws_s3_bucket_objects.go b/aws/data_source_aws_s3_bucket_objects.go
new file mode 100644
index 00000000000..e3fd777b072
--- /dev/null
+++ b/aws/data_source_aws_s3_bucket_objects.go
@@ -0,0 +1,150 @@
+package aws
+
+import (
+	"fmt"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/service/s3"
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/helper/schema"
+)
+
+const keyRequestPageSize = 1000
+
+func dataSourceAwsS3BucketObjects() *schema.Resource {
+	return &schema.Resource{
+		Read: dataSourceAwsS3BucketObjectsRead,
+
+		Schema: map[string]*schema.Schema{
+			"bucket": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"prefix": {
+				Type:     schema.TypeString,
+				Optional: true,
+			},
+			"delimiter": {
+				Type:     schema.TypeString,
+				Optional: true,
+			},
+			"encoding_type": {
+				Type:     schema.TypeString,
+				Optional: true,
+			},
+			"max_keys": {
+				Type:     schema.TypeInt,
+				Optional: true,
+				Default:  1000,
+			},
+			"start_after": {
+				Type:     schema.TypeString,
+				Optional: true,
+			},
+			"fetch_owner": {
+				Type:     schema.TypeBool,
+				Optional: true,
+			},
+			"keys": {
+				Type:     schema.TypeList,
+				Computed: true,
+				Elem:     &schema.Schema{Type: schema.TypeString},
+			},
+			"common_prefixes": {
+				Type:     schema.TypeList,
+				Computed: true,
+				Elem:     &schema.Schema{Type: schema.TypeString},
+			},
+			"owners": {
+				Type:     schema.TypeList,
+				Computed: true,
+				Elem:     &schema.Schema{Type: schema.TypeString},
+			},
+		},
+	}
+}
+
+func dataSourceAwsS3BucketObjectsRead(d *schema.ResourceData, meta interface{}) error {
+	conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	prefix := d.Get("prefix").(string)
+
+	d.SetId(resource.UniqueId())
+
+	listInput := s3.ListObjectsV2Input{
+		Bucket: aws.String(bucket),
+	}
+
+	if prefix != "" {
+		listInput.Prefix = aws.String(prefix)
+	}
+
+	if s, ok := d.GetOk("delimiter"); ok {
+		listInput.Delimiter = aws.String(s.(string))
+	}
+
+	if s, ok := d.GetOk("encoding_type"); ok {
+		listInput.EncodingType = aws.String(s.(string))
+	}
+
+	// "listInput.MaxKeys" refers to max keys returned in a single request
+	// (i.e., page size), not the total number of keys returned if you page
+	// through the results. "maxKeys" does refer to total keys returned.
+	maxKeys := int64(d.Get("max_keys").(int))
+	if maxKeys <= keyRequestPageSize {
+		listInput.MaxKeys = aws.Int64(maxKeys)
+	}
+
+	if s, ok := d.GetOk("start_after"); ok {
+		listInput.StartAfter = aws.String(s.(string))
+	}
+
+	if b, ok := d.GetOk("fetch_owner"); ok {
+		listInput.FetchOwner = aws.Bool(b.(bool))
+	}
+
+	var commonPrefixes []string
+	var keys []string
+	var owners []string
+
+	err := conn.ListObjectsV2Pages(&listInput, func(page *s3.ListObjectsV2Output, lastPage bool) bool {
+		for _, commonPrefix := range page.CommonPrefixes {
+			commonPrefixes = append(commonPrefixes, aws.StringValue(commonPrefix.Prefix))
+		}
+
+		for _, object := range page.Contents {
+			keys = append(keys, aws.StringValue(object.Key))
+
+			if object.Owner != nil {
+				owners = append(owners, aws.StringValue(object.Owner.ID))
+			}
+		}
+
+		maxKeys = maxKeys - aws.Int64Value(page.KeyCount)
+
+		if maxKeys <= keyRequestPageSize {
+			listInput.MaxKeys = aws.Int64(maxKeys)
+		}
+
+		return !lastPage
+	})
+
+	if err != nil {
+		return fmt.Errorf("error listing S3 Bucket (%s) Objects: %s", bucket, err)
+	}
+
+	if err := d.Set("common_prefixes", commonPrefixes); err != nil {
+		return fmt.Errorf("error setting common_prefixes: %s", err)
+	}
+
+	if err := d.Set("keys", keys); err != nil {
+		return fmt.Errorf("error setting keys: %s", err)
+	}
+
+	if err := d.Set("owners", owners); err != nil {
+		return fmt.Errorf("error setting owners: %s", err)
+	}
+
+	return nil
+}
diff --git a/aws/data_source_aws_s3_bucket_objects_test.go b/aws/data_source_aws_s3_bucket_objects_test.go
new file mode 100644
index 00000000000..6f575c68714
--- /dev/null
+++ b/aws/data_source_aws_s3_bucket_objects_test.go
@@ -0,0 +1,349 @@
+package aws
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+)
+
+func TestAccDataSourceAWSS3BucketObjects_basic(t *testing.T) {
+	rInt := acctest.RandInt()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                  func() { testAccPreCheck(t) },
+		Providers:                 testAccProviders,
+		PreventPostDestroyRefresh: true,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source
+				// Does not need Check
+			},
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigBasic(rInt),
+				Check: resource.ComposeTestCheckFunc(
+					testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "2"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.0", "arch/navajo/north_window"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.1", "arch/navajo/sand_dune"),
+				),
+			},
+		},
+	})
+}
+
+func TestAccDataSourceAWSS3BucketObjects_all(t *testing.T) {
+	rInt := acctest.RandInt()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                  func() { testAccPreCheck(t) },
+		Providers:                 testAccProviders,
+		PreventPostDestroyRefresh: true,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source
+				// Does not need Check
+			},
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigAll(rInt),
+				Check: resource.ComposeTestCheckFunc(
+					testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "7"),
"arch/courthouse_towers/landscape"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.1", "arch/navajo/north_window"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.2", "arch/navajo/sand_dune"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.3", "arch/partition/park_avenue"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.4", "arch/rubicon"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.5", "arch/three_gossips/broken"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.6", "arch/three_gossips/turret"), + ), + }, + }, + }) +} + +func TestAccDataSourceAWSS3BucketObjects_prefixes(t *testing.T) { + rInt := acctest.RandInt() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + PreventPostDestroyRefresh: true, + Steps: []resource.TestStep{ + { + Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source + // Does not need Check + }, + { + Config: testAccAWSDataSourceS3ObjectsConfigPrefixes(rInt), + Check: resource.ComposeTestCheckFunc( + testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "1"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.0", "arch/rubicon"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "common_prefixes.#", "4"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "common_prefixes.0", "arch/courthouse_towers/"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "common_prefixes.1", "arch/navajo/"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "common_prefixes.2", "arch/partition/"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "common_prefixes.3", "arch/three_gossips/"), + ), + }, + }, + }) +} + +func TestAccDataSourceAWSS3BucketObjects_encoded(t *testing.T) { + rInt := acctest.RandInt() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + PreventPostDestroyRefresh: true, + Steps: []resource.TestStep{ + { + Config: testAccAWSDataSourceS3ObjectsConfigExtraResource(rInt), // NOTE: contains no data source + // Does not need Check + }, + { + Config: testAccAWSDataSourceS3ObjectsConfigEncoded(rInt), + Check: resource.ComposeTestCheckFunc( + testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "2"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.0", "arch/ru+b+ic+on"), + resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.1", "arch/rubicon"), + ), + }, + }, + }) +} + +func TestAccDataSourceAWSS3BucketObjects_maxKeys(t *testing.T) { + rInt := acctest.RandInt() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + PreventPostDestroyRefresh: true, + Steps: []resource.TestStep{ + { + Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source + // Does not need Check + }, + { + Config: testAccAWSDataSourceS3ObjectsConfigMaxKeys(rInt), + Check: resource.ComposeTestCheckFunc( + testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"), + 
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "2"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.0", "arch/courthouse_towers/landscape"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.1", "arch/navajo/north_window"),
+				),
+			},
+		},
+	})
+}
+
+func TestAccDataSourceAWSS3BucketObjects_startAfter(t *testing.T) {
+	rInt := acctest.RandInt()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                  func() { testAccPreCheck(t) },
+		Providers:                 testAccProviders,
+		PreventPostDestroyRefresh: true,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source
+				// Does not need Check
+			},
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigStartAfter(rInt),
+				Check: resource.ComposeTestCheckFunc(
+					testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "1"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.0", "arch/three_gossips/turret"),
+				),
+			},
+		},
+	})
+}
+
+func TestAccDataSourceAWSS3BucketObjects_fetchOwner(t *testing.T) {
+	rInt := acctest.RandInt()
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:                  func() { testAccPreCheck(t) },
+		Providers:                 testAccProviders,
+		PreventPostDestroyRefresh: true,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigResources(rInt), // NOTE: contains no data source
+				// Does not need Check
+			},
+			{
+				Config: testAccAWSDataSourceS3ObjectsConfigOwners(rInt),
+				Check: resource.ComposeTestCheckFunc(
+					testAccCheckAwsS3ObjectsDataSourceExists("data.aws_s3_bucket_objects.yesh"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "keys.#", "2"),
+					resource.TestCheckResourceAttr("data.aws_s3_bucket_objects.yesh", "owners.#", "2"),
+				),
+			},
+		},
+	})
+}
+
+func testAccCheckAwsS3ObjectsDataSourceExists(addr string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		rs, ok := s.RootModule().Resources[addr]
+		if !ok {
+			return fmt.Errorf("Can't find S3 objects data source: %s", addr)
+		}
+
+		if rs.Primary.ID == "" {
+			return fmt.Errorf("S3 objects data source ID not set")
+		}
+
+		return nil
+	}
+}
+
+func testAccAWSDataSourceS3ObjectsConfigResources(randInt int) string {
+	return fmt.Sprintf(`
+resource "aws_s3_bucket" "objects_bucket" {
+  bucket = "tf-objects-test-bucket-%d"
+}
+
+resource "aws_s3_bucket_object" "object1" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/three_gossips/turret"
+  content = "Delicate"
+}
+
+resource "aws_s3_bucket_object" "object2" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/three_gossips/broken"
+  content = "Dark Angel"
+}
+
+resource "aws_s3_bucket_object" "object3" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/navajo/north_window"
+  content = "Balanced Rock"
+}
+
+resource "aws_s3_bucket_object" "object4" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/navajo/sand_dune"
+  content = "Queen Victoria Rock"
+}
+
+resource "aws_s3_bucket_object" "object5" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/partition/park_avenue"
+  content = "Double-O"
+}
+
+resource "aws_s3_bucket_object" "object6" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
+  key     = "arch/courthouse_towers/landscape"
+  content = "Fiery Furnace"
+}
+
+resource "aws_s3_bucket_object" "object7" {
+  bucket  = "${aws_s3_bucket.objects_bucket.id}"
"arch/rubicon" + content = "Devils Garden" +} +`, randInt) +} + +func testAccAWSDataSourceS3ObjectsConfigBasic(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + prefix = "arch/navajo/" + delimiter = "/" +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigAll(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigPrefixes(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + prefix = "arch/" + delimiter = "/" +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigExtraResource(randInt int) string { + return fmt.Sprintf(` +%s + +resource "aws_s3_bucket_object" "object8" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + key = "arch/ru b ic on" + content = "Goose Island" +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigEncoded(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + encoding_type = "url" + prefix = "arch/ru" +} +`, testAccAWSDataSourceS3ObjectsConfigExtraResource(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigMaxKeys(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + max_keys = 2 +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigStartAfter(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + start_after = "arch/three_gossips/broken" +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} + +func testAccAWSDataSourceS3ObjectsConfigOwners(randInt int) string { + return fmt.Sprintf(` +%s + +data "aws_s3_bucket_objects" "yesh" { + bucket = "${aws_s3_bucket.objects_bucket.id}" + prefix = "arch/three_gossips/" + fetch_owner = true +} +`, testAccAWSDataSourceS3ObjectsConfigResources(randInt)) +} diff --git a/aws/provider.go b/aws/provider.go index bc2407b8405..7ae1da3af02 100644 --- a/aws/provider.go +++ b/aws/provider.go @@ -252,6 +252,7 @@ func Provider() terraform.ResourceProvider { "aws_route53_zone": dataSourceAwsRoute53Zone(), "aws_s3_bucket": dataSourceAwsS3Bucket(), "aws_s3_bucket_object": dataSourceAwsS3BucketObject(), + "aws_s3_bucket_objects": dataSourceAwsS3BucketObjects(), "aws_secretsmanager_secret": dataSourceAwsSecretsManagerSecret(), "aws_secretsmanager_secret_version": dataSourceAwsSecretsManagerSecretVersion(), "aws_servicequotas_service": dataSourceAwsServiceQuotasService(), diff --git a/website/aws.erb b/website/aws.erb index 28b4f152dbe..9ff75a13792 100644 --- a/website/aws.erb +++ b/website/aws.erb @@ -2453,6 +2453,9 @@