Commit e5832d7
Made some of the glue crawler changes based on PR feedback
darrenhaken committed Jun 20, 2018
1 parent 51a1ae0 commit e5832d7
Showing 2 changed files with 77 additions and 26 deletions.
59 changes: 39 additions & 20 deletions aws/resource_aws_glue_crawler.go
@@ -10,6 +10,7 @@ import (
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/structure"
"github.com/hashicorp/terraform/helper/validation"
)

func resourceAwsGlueCrawler() *schema.Resource {
@@ -56,14 +57,19 @@ func resourceAwsGlueCrawler() *schema.Resource {
"delete_behavior": {
Type: schema.TypeString,
Optional: true,
//ValidateFunc: validateDeletion,
//TODO: Write a validate function to ensure value matches enum
ValidateFunc: validation.StringInSlice([]string{
glue.DeleteBehaviorDeleteFromDatabase,
glue.DeleteBehaviorDeprecateInDatabase,
glue.DeleteBehaviorLog,
}, false),
},
"update_behavior": {
Type: schema.TypeString,
Optional: true,
//ValidateFunc: validateUpdate,
//TODO: Write a validate function to ensure value matches enum
ValidateFunc: validation.StringInSlice([]string{
glue.UpdateBehaviorLog,
glue.UpdateBehaviorUpdateInDatabase,
}, false),
},
},
},
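For reference, validation.StringInSlice returns a schema.SchemaValidateFunc, so values outside the Glue enum are now rejected at plan time instead of by the Glue API. A minimal standalone sketch of how the validator behaves (the value "NOT_A_BEHAVIOR" is made up for illustration):

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/service/glue"
	"github.com/hashicorp/terraform/helper/validation"
)

func main() {
	// Same validator as delete_behavior above.
	validate := validation.StringInSlice([]string{
		glue.DeleteBehaviorDeleteFromDatabase,
		glue.DeleteBehaviorDeprecateInDatabase,
		glue.DeleteBehaviorLog,
	}, false)

	// Allowed values produce no errors; anything else is rejected.
	_, errs := validate("NOT_A_BEHAVIOR", "delete_behavior")
	fmt.Println(len(errs) > 0) // true
}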
@@ -115,7 +121,7 @@ func resourceAwsGlueCrawler() *schema.Resource {
"configuration": {
Type: schema.TypeString,
Optional: true,
DiffSuppressFunc: suppressEquivalentAwsPolicyDiffs,
DiffSuppressFunc: suppressEquivalentJsonDiffs,
ValidateFunc: validateJsonString,
},
},
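Switching the DiffSuppressFunc from suppressEquivalentAwsPolicyDiffs to suppressEquivalentJsonDiffs fits here because the crawler configuration is arbitrary JSON rather than an IAM policy document. The general idea behind a JSON-equivalence suppressor (a sketch under that assumption, not the provider's actual implementation) is to normalize both sides and compare, so formatting-only changes don't force an update:

// Sketch only; assumes the helper/schema and helper/structure imports already
// present in this file. Returns true (suppress the diff) when old and new are
// the same JSON once whitespace and key ordering are normalized.
func suppressJsonFormattingDiffs(k, old, new string, d *schema.ResourceData) bool {
	normalizedOld, err := structure.NormalizeJsonString(old)
	if err != nil {
		return false
	}
	normalizedNew, err := structure.NormalizeJsonString(new)
	if err != nil {
		return false
	}
	return normalizedOld == normalizedNew
}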
@@ -305,11 +311,18 @@ func resourceAwsGlueCrawlerRead(d *schema.ResourceData, meta interface{}) error
if isAWSErr(err, glue.ErrCodeEntityNotFoundException, "") {
log.Printf("[WARN] Glue Crawler (%s) not found, removing from state", d.Id())
d.SetId("")
return nil
}

return fmt.Errorf("error reading Glue crawler: %s", err.Error())
}

if crawlerOutput.Crawler == nil {
log.Printf("[WARN] Glue Crawler (%s) not found, removing from state", d.Id())
d.SetId("")
return nil
}

d.Set("name", crawlerOutput.Crawler.Name)
d.Set("database_name", crawlerOutput.Crawler.DatabaseName)
d.Set("role", crawlerOutput.Crawler.Role)
@@ -323,40 +336,46 @@ func resourceAwsGlueCrawlerRead(d *schema.ResourceData, meta interface{}) error
"delete_behavior": *crawlerOutput.Crawler.SchemaChangePolicy.DeleteBehavior,
"update_behavior": *crawlerOutput.Crawler.SchemaChangePolicy.UpdateBehavior,
}
d.Set("schema_change_policy", schemaPolicy)

if err := d.Set("schema_change_policy", []map[string]string{schemaPolicy}); err != nil {
return fmt.Errorf("error setting schema_change_policy: %s", schemaPolicy)
}
}

var s3Targets = crawlerOutput.Crawler.Targets.S3Targets
if crawlerOutput.Crawler.Targets.S3Targets != nil {
if err := d.Set("s3_target", flattenS3Targets(s3Targets)); err != nil {
log.Printf("[ERR] Error setting Glue S3 Targets: %s", err)

if crawlerOutput.Crawler.Targets != nil {
if crawlerOutput.Crawler.Targets.S3Targets != nil {
if err := d.Set("s3_target", flattenGlueS3Targets(s3Targets)); err != nil {
log.Printf("[ERR] Error setting Glue S3 Targets: %s", err)
}
}
}

var jdbcTargets = crawlerOutput.Crawler.Targets.JdbcTargets
if crawlerOutput.Crawler.Targets.JdbcTargets != nil {
if err := d.Set("jdbc_target", flattenJdbcTargets(jdbcTargets)); err != nil {
log.Printf("[ERR] Error setting Glue JDBC Targets: %s", err)
var jdbcTargets = crawlerOutput.Crawler.Targets.JdbcTargets
if crawlerOutput.Crawler.Targets.JdbcTargets != nil {
if err := d.Set("jdbc_target", flattenGlueJdbcTargets(jdbcTargets)); err != nil {
log.Printf("[ERR] Error setting Glue JDBC Targets: %s", err)
}
}
}

// AWS provides no other way to read back the additional_info
if v, ok := d.GetOk("additional_info"); ok {
if v, ok := d.GetOk("configuration"); ok {
info, err := structure.NormalizeJsonString(v)
if err != nil {
return fmt.Errorf("Additional Info contains an invalid JSON: %v", err)
}
d.Set("additional_info", info)
d.Set("configuration", info)
}
return nil
}

func flattenS3Targets(s3Targets []*glue.S3Target) []map[string]interface{} {
func flattenGlueS3Targets(s3Targets []*glue.S3Target) []map[string]interface{} {
result := make([]map[string]interface{}, 0)

for _, s3Target := range s3Targets {
attrs := make(map[string]interface{})
attrs["path"] = *s3Target.Path
attrs["path"] = aws.StringValue(s3Target.Path)

if len(s3Target.Exclusions) > 0 {
attrs["exclusions"] = flattenStringList(s3Target.Exclusions)
@@ -367,12 +386,12 @@ func flattenS3Targets(s3Targets []*glue.S3Target) []map[string]interface{} {
return result
}

func flattenJdbcTargets(jdbcTargets []*glue.JdbcTarget) []map[string]interface{} {
func flattenGlueJdbcTargets(jdbcTargets []*glue.JdbcTarget) []map[string]interface{} {
result := make([]map[string]interface{}, 0)

for _, jdbcTarget := range jdbcTargets {
attrs := make(map[string]interface{})
attrs["path"] = *jdbcTarget.Path
attrs["path"] = aws.StringValue(jdbcTarget.Path)
attrs["connection_name"] = *jdbcTarget.ConnectionName

if len(jdbcTarget.Exclusions) > 0 {
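One small note on the flatten helpers: replacing *s3Target.Path with aws.StringValue(s3Target.Path) makes the read nil-safe, since the helper returns an empty string for a nil pointer instead of panicking (connection_name above still dereferences directly). A tiny standalone illustration:

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
)

func main() {
	var path *string // e.g. an S3Target whose Path was never populated

	fmt.Printf("%q\n", aws.StringValue(path)) // prints "" — safe
	// fmt.Println(*path)                     // would panic: nil pointer dereference
}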
44 changes: 38 additions & 6 deletions aws/resource_aws_glue_crawler_test.go
@@ -12,8 +12,9 @@ import (
func TestAccAWSGlueCrawler_basic(t *testing.T) {
const name = "aws_glue_crawler.test"
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSGlueCrawlerDestroy,
Steps: []resource.TestStep{
{
Config: testAccGlueCrawlerConfigBasic,
@@ -31,8 +32,9 @@ func TestAccAWSGlueCrawler_basic(t *testing.T) {
func TestAccAWSGlueCrawler_jdbcCrawler(t *testing.T) {
const name = "aws_glue_crawler.test"
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSGlueCrawlerDestroy,
Steps: []resource.TestStep{
{
Config: testAccGlueCrawlerConfigJdbc,
@@ -51,8 +53,9 @@ func TestAccAWSGlueCrawler_jdbcCrawler(t *testing.T) {
func TestAccAWSGlueCrawler_customCrawlers(t *testing.T) {
const name = "aws_glue_crawler.test"
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSGlueCrawlerDestroy,
Steps: []resource.TestStep{
{
Config: testAccGlueCrawlerConfigCustomClassifiers,
@@ -100,6 +103,35 @@ func checkGlueCatalogCrawlerExists(name string, crawlerName string) resource.Tes
}
}

func testAccCheckAWSGlueCrawlerDestroy(s *terraform.State) error {
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_glue_crawler" {
continue
}

conn := testAccProvider.Meta().(*AWSClient).glueconn
output, err := conn.GetCrawler(&glue.GetCrawlerInput{
Name: aws.String(rs.Primary.ID),
})

if err != nil {
if isAWSErr(err, glue.ErrCodeEntityNotFoundException, "") {
return nil
}

}

crawler := output.Crawler
if crawler != nil && aws.StringValue(crawler.Name) == rs.Primary.ID {
return fmt.Errorf("Glue Crawler %s still exists", rs.Primary.ID)
}

return err
}

return nil
}
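One thing a reviewer might still flag in the destroy check above: a NotFound error passes the whole check without looking at the remaining resources, and the unconditional return at the bottom of the loop means only the first matching resource is ever examined. A possible tightening (a sketch with a hypothetical name, not part of this commit):

func testAccCheckAWSGlueCrawlerDestroyAlt(s *terraform.State) error {
	conn := testAccProvider.Meta().(*AWSClient).glueconn

	for _, rs := range s.RootModule().Resources {
		if rs.Type != "aws_glue_crawler" {
			continue
		}

		output, err := conn.GetCrawler(&glue.GetCrawlerInput{
			Name: aws.String(rs.Primary.ID),
		})
		if err != nil {
			if isAWSErr(err, glue.ErrCodeEntityNotFoundException, "") {
				// This crawler is already gone; keep checking the rest.
				continue
			}
			return err
		}

		if output.Crawler != nil && aws.StringValue(output.Crawler.Name) == rs.Primary.ID {
			return fmt.Errorf("Glue Crawler %s still exists", rs.Primary.ID)
		}
	}

	return nil
}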

const testAccGlueCrawlerConfigBasic = `
resource "aws_glue_catalog_database" "test_db" {
name = "test_db"
