Skip to content

Commit

Permalink
Correct exporting of the computed attributes for databricks_cluster
Browse files Browse the repository at this point in the history
  • Loading branch information
alexott authored Nov 4, 2022
1 parent 86884f3 commit f840c46
Show file tree
Hide file tree
Showing 7 changed files with 86 additions and 9 deletions.
11 changes: 8 additions & 3 deletions exporter/context.go
Original file line number Diff line number Diff line change
Expand Up @@ -533,11 +533,16 @@ func (ic *importContext) dataToHcl(i importable, path []string,
})
for _, tuple := range ss {
a, as := tuple.Field, tuple.Schema
if as.Computed {
continue
}
pathString := strings.Join(append(path, a), ".")
raw, ok := d.GetOk(pathString)
if i.ShouldOmitField == nil { // we don't have custom function, so skip computed & default fields
// log.Printf("[DEBUG] path=%s, raw='%v'", pathString, raw)
if defaultShouldOmitFieldFunc(ic, pathString, as, d) {
continue
}
} else if i.ShouldOmitField(ic, pathString, as, d) {
continue
}
for _, r := range i.Depends {
if r.Path == pathString && r.Variable {
// sensitive fields are moved to variable depends
Expand Down
22 changes: 21 additions & 1 deletion exporter/importables.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ var (
gsRegex = regexp.MustCompile(`^gs://([^/]+)(/.*)?$`)
globalWorkspaceConfName = "global_workspace_conf"
notebookPathToIdRegex = regexp.MustCompile(`[^a-zA-Z0-9]+`)
jobClustersRegex = regexp.MustCompile(`^((job_cluster|task)\.[0-9]+\.new_cluster\.[0-9]+\.)`)
dltClusterRegex = regexp.MustCompile(`^(cluster\.[0-9]+\.)`)
)

type dbsqlListResponse struct {
Expand Down Expand Up @@ -297,6 +299,7 @@ var resourcesMap map[string]importable = map[string]importable{
}
return ic.importLibraries(r.Data, s)
},
ShouldOmitField: makeShouldOmitFieldForCluster(nil),
},
"databricks_job": {
ApiVersion: common.API_2_1,
Expand Down Expand Up @@ -472,6 +475,16 @@ var resourcesMap map[string]importable = map[string]importable{
}
return nil
},
// ShouldOmitField for databricks_job: decides which job attributes are
// skipped when generating HCL for an exported job.
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
	switch pathString {
	case "url", "format":
		// These job attributes are unconditionally omitted from the export.
		return true
	}
	if res := jobClustersRegex.FindStringSubmatch(pathString); res != nil { // analyze job clusters
		// Path lies inside a job_cluster.N.new_cluster.N. / task.N.new_cluster.N.
		// block (see jobClustersRegex) — apply the cluster-specific rules,
		// using the matched prefix to resolve sibling fields.
		return makeShouldOmitFieldForCluster(jobClustersRegex)(ic, pathString, as, d)
	}
	// All other fields fall back to the default computed/default-value filter.
	return defaultShouldOmitFieldFunc(ic, pathString, as, d)
},
},
"databricks_cluster_policy": {
Service: "compute",
Expand Down Expand Up @@ -1349,7 +1362,14 @@ var resourcesMap map[string]importable = map[string]importable{
})
}
return nil
}, Depends: []reference{
},
// ShouldOmitField for this resource: fields inside cluster.N. blocks get
// the cluster-specific omission rules; everything else uses the default
// computed/default-value filter.
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
	if res := dltClusterRegex.FindStringSubmatch(pathString); res != nil { // analyze DLT clusters
		// Matched a cluster.N. prefix (see dltClusterRegex) — delegate to
		// the shared cluster field filter with that prefix.
		return makeShouldOmitFieldForCluster(dltClusterRegex)(ic, pathString, as, d)
	}
	return defaultShouldOmitFieldFunc(ic, pathString, as, d)
},
Depends: []reference{
{Path: "creator_user_name", Resource: "databricks_user", Match: "user_name"},
{Path: "cluster.aws_attributes.instance_profile_arn", Resource: "databricks_instance_profile"},
{Path: "new_cluster.init_scripts.dbfs.destination", Resource: "databricks_dbfs_file"},
Expand Down
2 changes: 2 additions & 0 deletions exporter/model.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ type importable struct {
Body func(ic *importContext, body *hclwrite.Body, r *resource) error
// Function to detect if the given resource should be ignored or not
Ignore func(ic *importContext, r *resource) bool
// Function to check if the field in the given resource should be omitted or not
ShouldOmitField func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool
// Defines which API version should be used for this specific resource
ApiVersion common.ApiVersion
}
Expand Down
6 changes: 3 additions & 3 deletions exporter/test-data/clusters-list-response.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
},
"driver_node_type_id": "Standard_DS3_v2",
"enable_elastic_disk": true,
"enable_local_disk_encryption": false,
"enable_local_disk_encryption": true,
"executors": [
{
"host_private_ip": "10.139.0.9",
Expand Down Expand Up @@ -118,7 +118,7 @@
},
"driver_node_type_id": "Standard_F4s",
"enable_elastic_disk": true,
"enable_local_disk_encryption": false,
"enable_local_disk_encryption": true,
"executors": [
{
"host_private_ip": "10.139.0.7",
Expand Down Expand Up @@ -188,7 +188,7 @@
},
"driver_node_type_id": "i3.4xlarge",
"enable_elastic_disk": true,
"enable_local_disk_encryption": false,
"enable_local_disk_encryption": true,
"executors": [
{
"host_private_ip": "10.0.234.198",
Expand Down
2 changes: 1 addition & 1 deletion exporter/test-data/get-cluster-test1-response.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
},
"driver_node_type_id": "Standard_DS3_v2",
"enable_elastic_disk": true,
"enable_local_disk_encryption": false,
"enable_local_disk_encryption": true,
"executors": [
{
"host_private_ip": "10.139.0.9",
Expand Down
2 changes: 1 addition & 1 deletion exporter/test-data/get-cluster-test2-response.json
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
},
"driver_node_type_id": "Standard_F4s",
"enable_elastic_disk": true,
"enable_local_disk_encryption": false,
"enable_local_disk_encryption": true,
"executors": [
{
"host_private_ip": "10.139.0.7",
Expand Down
50 changes: 50 additions & 0 deletions exporter/util.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ import (
"log"
"os"
"path"
"reflect"
"regexp"
"strings"
"time"

Expand Down Expand Up @@ -41,6 +43,7 @@ func (ic *importContext) importCluster(c *clusters.Cluster) {
})
}
if c.InstancePoolID != "" {
// set enable_elastic_disk to false, and remove aws/gcp/azure_attributes
ic.Emit(&resource{
Resource: "databricks_instance_pool",
ID: c.InstancePoolID,
Expand Down Expand Up @@ -342,3 +345,50 @@ func (ic *importContext) createFileIn(dir, name string, content []byte) (string,
relativeName := strings.Replace(localFileName, ic.Directory+"/", "", 1)
return relativeName, nil
}

// defaultShouldOmitFieldFunc is the standard filter applied while exporting
// resource attributes to HCL: a field is omitted when it is computed
// (server-assigned) or when its current value still equals the schema's
// declared default, since neither carries user intent.
func defaultShouldOmitFieldFunc(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
	// Computed attributes are populated by the backend, never by the user.
	if as.Computed {
		return true
	}
	// Omit values that merely restate the schema default.
	return as.Default != nil && d.Get(pathString) == as.Default
}

// makeShouldOmitFieldForCluster builds a ShouldOmitField implementation for
// cluster-shaped blocks. When regex is non-nil, only paths it matches are
// treated as cluster fields and the matched text (res[0]) becomes the path
// prefix used to look up sibling attributes (e.g. "cluster.0."); a nil regex
// means the resource itself is a cluster and the prefix is empty.
func makeShouldOmitFieldForCluster(regex *regexp.Regexp) func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
	return func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
		var prefix string
		if regex != nil {
			res := regex.FindStringSubmatch(pathString)
			if res == nil {
				// Not a cluster field — this filter makes no omission decision.
				return false
			}
			prefix = res[0]
		}
		raw := d.Get(pathString)
		// Optional fields left at their zero value carry no information.
		if raw != nil && as.Optional && reflect.ValueOf(raw).IsZero() {
			return true
		}
		workerInstPoolID := d.Get(prefix + "instance_pool_id").(string)
		switch pathString {
		case prefix + "node_type_id":
			// The node type is implied by the instance pool when one is set.
			return workerInstPoolID != ""
		case prefix + "driver_node_type_id":
			// Redundant when any pool is in use, or when it just mirrors node_type_id.
			driverInstPoolID := d.Get(prefix + "driver_instance_pool_id").(string)
			nodeTypeID := d.Get(prefix + "node_type_id").(string)
			return workerInstPoolID != "" || driverInstPoolID != "" || raw.(string) == nodeTypeID
		case prefix + "driver_instance_pool_id":
			// Omit only when it duplicates the worker instance pool.
			return raw.(string) == workerInstPoolID
		case prefix + "enable_elastic_disk", prefix + "aws_attributes", prefix + "azure_attributes", prefix + "gcp_attributes":
			// These settings come from the instance pool when one is set.
			return workerInstPoolID != ""
		case prefix + "enable_local_disk_encryption":
			// Never omitted here — exported explicitly even when false.
			return false
		}
		return defaultShouldOmitFieldFunc(ic, pathString, as, d)
	}
}

0 comments on commit f840c46

Please sign in to comment.