Merge branch 'master' into delta-sharing-catalog
nfx authored Jan 3, 2023
2 parents 27f53ee + 242ff84 commit 92f2f4b
Showing 7 changed files with 60 additions and 6 deletions.
1 change: 1 addition & 0 deletions docs/resources/secret.md
@@ -33,6 +33,7 @@ In addition to all arguments above, the following attributes are exported:

* `id` - Canonical unique identifier for the secret.
* `last_updated_timestamp` - (Integer) time secret was updated
+* `config_reference` - (String) value to use as a secret reference in [Spark configuration and environment variables](https://docs.databricks.com/security/secrets/secrets.html#use-a-secret-in-a-spark-configuration-property-or-environment-variable): `{{secrets/scope/key}}`.


## Import
26 changes: 25 additions & 1 deletion exporter/exporter_test.go
@@ -1565,6 +1565,30 @@ func TestImportingDLTPipelines(t *testing.T) {
Resource: "/api/2.0/instance-profiles/list",
Response: getJSONObject("test-data/list-instance-profiles.json"),
},
+		{
+			Method:       "GET",
+			Resource:     "/api/2.0/secrets/scopes/list",
+			ReuseRequest: true,
+			Response:     getJSONObject("test-data/secret-scopes-response.json"),
+		},
+		{
+			Method:       "GET",
+			Resource:     "/api/2.0/secrets/list?scope=some-kv-scope",
+			ReuseRequest: true,
+			Response:     getJSONObject("test-data/secret-scopes-list-scope-response.json"),
+		},
+		{
+			Method:       "GET",
+			Resource:     "/api/2.0/secrets/acls/list?scope=some-kv-scope",
+			ReuseRequest: true,
+			Response:     getJSONObject("test-data/secret-scopes-list-scope-acls-response.json"),
+		},
+		{
+			Method:       "GET",
+			Resource:     "/api/2.0/secrets/acls/get?principal=test%40test.com&scope=some-kv-scope",
+			ReuseRequest: true,
+			Response:     getJSONObject("test-data/secret-scopes-get-principal-response.json"),
+		},
},
func(ctx context.Context, client *common.DatabricksClient) {
tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName())
@@ -1573,7 +1597,7 @@ func TestImportingDLTPipelines(t *testing.T) {
ic := newImportContext(client)
ic.Directory = tmpDir
ic.listing = "dlt"
ic.services = "dlt,access,notebooks"
ic.services = "dlt,access,notebooks,secrets"

err := ic.Run()
assert.NoError(t, err)
8 changes: 6 additions & 2 deletions exporter/importables.go
@@ -42,6 +42,7 @@ var (
jobClustersRegex = regexp.MustCompile(`^((job_cluster|task)\.[0-9]+\.new_cluster\.[0-9]+\.)`)
dltClusterRegex = regexp.MustCompile(`^(cluster\.[0-9]+\.)`)
predefinedClusterPolicies = []string{"Personal Compute", "Job Compute", "Power User Compute", "Shared Compute"}
+	secretPathRegex = regexp.MustCompile(`^\{\{secrets\/([^\/]+)\/([^}]+)\}\}$`)
)

func generateMountBody(ic *importContext, body *hclwrite.Body, r *resource) error {
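The new `secretPathRegex` recognizes `{{secrets/scope/key}}` references and captures the scope and key separately. A minimal, self-contained sketch of what the pattern extracts (illustrative only, not code from this commit; the sample values are hypothetical):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as secretPathRegex above: anchored at both ends, the first
// group captures the scope (no slashes), the second the key (up to "}}").
var secretPathRegex = regexp.MustCompile(`^\{\{secrets\/([^\/]+)\/([^}]+)\}\}$`)

func main() {
	for _, v := range []string{
		"{{secrets/some-kv-scope/test-secret}}", // matches
		"plain-value",                           // not a secret reference
	} {
		if res := secretPathRegex.FindStringSubmatch(v); res != nil {
			fmt.Printf("scope=%q key=%q\n", res[1], res[2])
		}
	}
}
```

This is what lets the exporter emit a `databricks_secret_scope` resource for every secret referenced from a cluster's Spark configuration or environment variables (see `emitSecretsFromSecretsPath` in `exporter/util.go` below).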
@@ -518,7 +519,7 @@ var resourcesMap map[string]importable = map[string]importable{
return err
}
// TODO: don't export users and admins group
-		for _, g := range ic.allGroups {
+		for offset, g := range ic.allGroups {
if !ic.MatchesName(g.DisplayName) {
log.Printf("[INFO] Group %s doesn't match %s filter", g.DisplayName, ic.match)
continue
@@ -527,6 +528,7 @@
Resource: "databricks_group",
ID: g.ID,
})
log.Printf("[INFO] Scanned %d of %d groups", offset+1, len(ic.allGroups))
}
return nil
},
@@ -792,7 +794,7 @@ var resourcesMap map[string]importable = map[string]importable{
ID: scope.Name,
Name: scope.Name,
})
log.Printf("[INFO] Imported %d of %d secret scopes", i, len(scopes))
log.Printf("[INFO] Imported %d of %d secret scopes", i+1, len(scopes))
}
}
return nil
@@ -1426,6 +1428,8 @@ var resourcesMap map[string]importable = map[string]importable{
})
}
}
+			ic.emitSecretsFromSecretsPath(cluster.SparkConf)
+			ic.emitSecretsFromSecretsPath(cluster.SparkEnvVars)
}

if ic.meAdmin {
7 changes: 6 additions & 1 deletion exporter/test-data/get-dlt-pipeline.json
@@ -23,7 +23,12 @@
"destination": "dbfs:/FileStore/jars/test.jar"
}
}
-      ]
+      ],
+      "spark_conf": {
+        "fs.azure.account.auth.type": "OAuth",
+        "fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
+        "fs.azure.account.oauth2.client.secret": "{{secrets/some-kv-scope/test-secret}}"
+      }
}
],
"continuous": true,
18 changes: 16 additions & 2 deletions exporter/util.go
@@ -61,9 +61,22 @@ func (ic *importContext) importCluster(c *clusters.Cluster) {
ID: c.PolicyID,
})
}
+	ic.emitSecretsFromSecretsPath(c.SparkConf)
+	ic.emitSecretsFromSecretsPath(c.SparkEnvVars)
ic.emitUserOrServicePrincipal(c.SingleUserName)
}

+func (ic *importContext) emitSecretsFromSecretsPath(m map[string]string) {
+	for _, v := range m {
+		if res := secretPathRegex.FindStringSubmatch(v); res != nil {
+			ic.Emit(&resource{
+				Resource: "databricks_secret_scope",
+				ID:       res[1],
+			})
+		}
+	}
+}
+
func (ic *importContext) emitUserOrServicePrincipal(userOrSPName string) {
if userOrSPName == "" {
return
@@ -352,7 +365,7 @@ func (ic *importContext) importJobs(l []jobs.Job) {
a := jobs.NewJobsAPI(ic.Context, ic.Client)
starterAfter := (nowSeconds - (ic.lastActiveDays * 24 * 60 * 60)) * 1000
i := 0
-	for _, job := range l {
+	for offset, job := range l {
if !ic.MatchesName(job.Settings.Name) {
log.Printf("[INFO] Job name %s doesn't match selection %s", job.Settings.Name, ic.match)
continue
@@ -383,8 +396,9 @@ func (ic *importContext) importJobs(l []jobs.Job) {
ID: job.ID(),
})
i++
log.Printf("[INFO] Imported %d of total %d jobs", i, len(l))
log.Printf("[INFO] Scanned %d of total %d jobs", offset+1, len(l))
}
log.Printf("[INFO] %d of total %d jobs are going to be imported", i, len(l))
}

// returns created file name in "files" directory for the export and error if any
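The reworked logging distinguishes jobs scanned (every element visited, reported via `offset+1`) from jobs actually queued for import (counted in `i` once the filters pass). A minimal sketch of that two-counter pattern, using hypothetical job names and a stand-in filter in place of the exporter's real name and last-active checks:

```go
package main

import (
	"log"
	"strings"
)

func main() {
	jobs := []string{"etl-daily", "scratch-job", "etl-hourly"}
	// Hypothetical stand-in for the exporter's name and last-active filters.
	matches := func(name string) bool { return strings.HasPrefix(name, "etl") }

	i := 0 // jobs that passed the filters and will be imported
	for offset, name := range jobs {
		if !matches(name) {
			continue // skipped jobs still advance offset, so progress stays accurate
		}
		i++
		log.Printf("[INFO] Scanned %d of total %d jobs", offset+1, len(jobs))
	}
	log.Printf("[INFO] %d of total %d jobs are going to be imported", i, len(jobs))
}
```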
5 changes: 5 additions & 0 deletions secrets/resource_secret.go
@@ -145,6 +145,10 @@ func ResourceSecret() *schema.Resource {
Type: schema.TypeInt,
Computed: true,
},
"config_reference": {
Type: schema.TypeString,
Computed: true,
},
},
Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
if err := NewSecretsAPI(ctx, c).Create(d.Get("string_value").(string), d.Get("scope").(string),
@@ -163,6 +167,7 @@ func ResourceSecret() *schema.Resource {
if err != nil {
return err
}
d.Set("config_reference", fmt.Sprintf("{{secrets/%s/%s}}", scope, key))
return d.Set("last_updated_timestamp", m.LastUpdatedTimestamp)
},
Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
1 change: 1 addition & 0 deletions secrets/resource_secret_test.go
@@ -35,6 +35,7 @@ func TestResourceSecretRead(t *testing.T) {
assert.Equal(t, 12345678, d.Get("last_updated_timestamp"))
assert.Equal(t, "foo", d.Get("scope"))
assert.Equal(t, "", d.Get("string_value"))
assert.Equal(t, "{{secrets/foo/bar}}", d.Get("config_reference"))
}

func TestResourceSecretRead_NotFound(t *testing.T) {
