Exporter: generate references that match only a prefix #1890

Merged 4 commits on Jan 4, 2023

Changes from 3 commits
82 changes: 57 additions & 25 deletions exporter/context.go
@@ -301,7 +301,7 @@ func (ic *importContext) MatchesName(n string) bool {
return strings.Contains(strings.ToLower(n), strings.ToLower(ic.match))
}

func (ic *importContext) Find(r *resource, pick string) hcl.Traversal {
func (ic *importContext) Find(r *resource, pick string, matchType MatchType) (string, hcl.Traversal) {
for _, sr := range ic.State.Resources {
if sr.Type != r.Resource {
continue
@@ -313,24 +313,36 @@ func (ic *importContext) Find(r *resource, pick string) hcl.Traversal {
r.Attribute, r.Resource, r.Name, r.ID)
continue
}
if v.(string) == r.Value {
if sr.Mode == "data" {
return hcl.Traversal{
hcl.TraverseRoot{Name: "data"},
hcl.TraverseAttr{Name: sr.Type},
hcl.TraverseAttr{Name: sr.Name},
hcl.TraverseAttr{Name: pick},
}
}
return hcl.Traversal{
hcl.TraverseRoot{Name: sr.Type},
strValue := v.(string)
matched := false
switch matchType {
case MatchExact:
matched = (strValue == r.Value)
case MatchPrefix:
matched = strings.HasPrefix(r.Value, strValue)
default:
log.Printf("[WARN] Unsupported match type: %s", matchType)
}
if !matched {
continue
}
if sr.Mode == "data" {
return strValue, hcl.Traversal{
hcl.TraverseRoot{Name: "data"},
hcl.TraverseAttr{Name: sr.Type},
hcl.TraverseAttr{Name: sr.Name},
hcl.TraverseAttr{Name: pick},
}
}
return strValue, hcl.Traversal{
hcl.TraverseRoot{Name: sr.Type},
hcl.TraverseAttr{Name: sr.Name},
hcl.TraverseAttr{Name: pick},
}

}
}
return nil
return "", nil
}

func (ic *importContext) Has(r *resource) bool {
@@ -486,6 +498,35 @@ func (ic *importContext) Emit(r *resource) {
ic.Add(r)
}

func (ic *importContext) getTraversalTokens(ref reference, value string) hclwrite.Tokens {
matchType := ref.MatchTypeValue()
attr := ref.MatchAttribute()
attrValue, traversal := ic.Find(&resource{
Resource: ref.Resource,
Attribute: attr,
Value: value,
}, attr, matchType)
// ic.Find returns a nil traversal when no resource with a matching value is found
if traversal == nil {
return nil
}
switch matchType {
case MatchExact:
return hclwrite.TokensForTraversal(traversal)
case MatchPrefix:
rest := value[len(attrValue):]
tokens := hclwrite.Tokens{&hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte{'"', '$', '{'}}}
tokens = append(tokens, hclwrite.TokensForTraversal(traversal)...)
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'}'}})
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(rest)})
tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte{'"'}})
return tokens
default:
log.Printf("[WARN] Unsupported match type: %s", ref.MatchType)
}
return nil
}

// TODO: move to IC
var dependsRe = regexp.MustCompile(`(\.[\d]+)`)

@@ -506,18 +547,9 @@ func (ic *importContext) reference(i importable, path []string, value string) hc
if d.Variable {
return ic.variable(fmt.Sprintf("%s_%s", path[0], value), "")
}
attr := "id"
if d.Match != "" {
attr = d.Match
}
traversal := ic.Find(&resource{
Resource: d.Resource,
Attribute: attr,
Value: value,
}, attr)
//at least one invocation of ic.Find will assign Nil to traversal if resource with value is not found
if traversal != nil {
return hclwrite.TokensForTraversal(traversal)

if tokens := ic.getTraversalTokens(d, value); tokens != nil {
return tokens
}
}
return hclwrite.TokensForValue(cty.StringVal(value))
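The MatchPrefix branch of getTraversalTokens above splices a traversal into a quoted HCL template and appends the remainder of the value as a literal. Below is a minimal standalone sketch of that token layout, not the exporter's own code; the resource name `this` and the attribute `notebook_path` are hypothetical, chosen only for illustration:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/hashicorp/hcl/v2/hclwrite"
)

func main() {
	// attrValue plays the role of the matched resource's "path" attribute in state;
	// value is the full notebook path being resolved.
	attrValue := "/Repos/user@domain.com/repo"
	value := "/Repos/user@domain.com/repo/Test DLT"
	rest := value[len(attrValue):] // "/Test DLT"

	// Traversal of the matched resource; "this" is a hypothetical resource name.
	traversal := hcl.Traversal{
		hcl.TraverseRoot{Name: "databricks_repo"},
		hcl.TraverseAttr{Name: "this"},
		hcl.TraverseAttr{Name: "path"},
	}

	// Same token layout as the MatchPrefix case: "${<traversal>}<rest>"
	tokens := hclwrite.Tokens{&hclwrite.Token{Type: hclsyntax.TokenOQuote, Bytes: []byte(`"${`)}}
	tokens = append(tokens, hclwrite.TokensForTraversal(traversal)...)
	tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte(`}`)})
	tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenQuotedLit, Bytes: []byte(rest)})
	tokens = append(tokens, &hclwrite.Token{Type: hclsyntax.TokenCQuote, Bytes: []byte(`"`)})

	f := hclwrite.NewEmptyFile()
	f.Body().SetAttributeRaw("notebook_path", tokens)
	fmt.Print(string(f.Bytes()))
	// Expected output, roughly: notebook_path = "${databricks_repo.this.path}/Test DLT"
}
```

In the exporter itself the traversal and the matched attribute value come from ic.Find, which under MatchPrefix selects the state resource whose attribute value is a prefix of the value being resolved (strings.HasPrefix(r.Value, strValue)).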
5 changes: 3 additions & 2 deletions exporter/context_test.go
@@ -17,7 +17,7 @@ func TestMatchesName(t *testing.T) {
}

func TestImportContextFindSkips(t *testing.T) {
assert.Nil(t, (&importContext{
_, traversal := (&importContext{
State: stateApproximation{
Resources: []resourceApproximation{
{
@@ -36,7 +36,8 @@ func TestImportContextFindSkips(t *testing.T) {
Resource: "a",
Attribute: "b",
Name: "c",
}, "x"))
}, "x", "")
assert.Nil(t, traversal)
}

func TestImportContextHas(t *testing.T) {
47 changes: 46 additions & 1 deletion exporter/exporter_test.go
@@ -1533,6 +1533,51 @@ func TestImportingDLTPipelines(t *testing.T) {
},
},
},
{
Method: "GET",
Resource: "/api/2.0/permissions/repos/123",
Response: getJSONObject("test-data/get-repo-permissions.json"),
},
{
Method: "GET",
Resource: "/api/2.0/workspace/get-status?path=%2FRepos%2Fuser%40domain.com%2Frepo",
Response: workspace.ObjectStatus{
ObjectID: 123,
ObjectType: "REPO",
Path: "/Repos/user@domain.com/repo",
},
},
{
Method: "GET",
Resource: "/api/2.0/repos/123",
Response: repos.ReposInformation{
ID: 123,
Url: "https://github.com/user/test.git",
Provider: "gitHub",
Path: "/Repos/user@domain.com/repo",
HeadCommitID: "1124323423abc23424",
Branch: "releases",
},
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/preview/scim/v2/Users?filter=userName%20eq%20%27user%40domain.com%27",
Response: scim.UserList{
Resources: []scim.User{
{ID: "123", DisplayName: "user@domain.com", UserName: "user@domain.com"},
},
StartIndex: 1,
TotalResults: 1,
ItemsPerPage: 1,
},
ReuseRequest: true,
},
{
Method: "GET",
Resource: "/api/2.0/preview/scim/v2/Users/123",
Response: scim.User{ID: "123", DisplayName: "user@domain.com", UserName: "user@domain.com"},
},
{
Method: "GET",
Resource: "/api/2.0/pipelines/123",
@@ -1597,7 +1642,7 @@ func TestImportingDLTPipelines(t *testing.T) {
ic := newImportContext(client)
ic.Directory = tmpDir
ic.listing = "dlt"
ic.services = "dlt,access,notebooks,secrets"
ic.services = "dlt,access,notebooks,users,repos,secrets"

err := ic.Run()
assert.NoError(t, err)
50 changes: 37 additions & 13 deletions exporter/importables.go
@@ -41,6 +41,7 @@ var (
nameNormalizationRegex = regexp.MustCompile(`\W+`)
jobClustersRegex = regexp.MustCompile(`^((job_cluster|task)\.[0-9]+\.new_cluster\.[0-9]+\.)`)
dltClusterRegex = regexp.MustCompile(`^(cluster\.[0-9]+\.)`)
uuidRegex = regexp.MustCompile(`^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$`)
predefinedClusterPolicies = []string{"Personal Compute", "Job Compute", "Power User Compute", "Shared Compute"}
secretPathRegex = regexp.MustCompile(`^\{\{secrets\/([^\/]+)\/([^}]+)\}\}$`)
)
@@ -287,6 +288,7 @@ var resourcesMap map[string]importable = map[string]importable{
{Path: "spark_python_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "spark_jar_task.jar_uri", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "notebook_task.notebook_path", Resource: "databricks_notebook"},
{Path: "notebook_task.notebook_path", Resource: "databricks_repo", Match: "path", MatchType: MatchPrefix},
{Path: "pipeline_task.pipeline_id", Resource: "databricks_pipeline"},
{Path: "task.library.jar", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "task.library.whl", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
@@ -295,6 +297,7 @@
{Path: "task.spark_python_task.parameters", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "task.spark_jar_task.jar_uri", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "task.notebook_task.notebook_path", Resource: "databricks_notebook"},
{Path: "task.notebook_task.notebook_path", Resource: "databricks_repo", Match: "path", MatchType: MatchPrefix},
{Path: "task.pipeline_task.pipeline_id", Resource: "databricks_pipeline"},
{Path: "task.sql_task.query.query_id", Resource: "databricks_sql_query"},
{Path: "task.sql_task.dashboard.dashboard_id", Resource: "databricks_sql_dashboard"},
@@ -359,10 +362,7 @@
}
}
if job.NotebookTask != nil {
ic.Emit(&resource{
Resource: "databricks_notebook",
ID: job.NotebookTask.NotebookPath,
})
ic.emitNotebookOrRepo(job.NotebookTask.NotebookPath)
}
if job.PipelineTask != nil {
ic.Emit(&resource{
@@ -373,10 +373,7 @@
// Support for multitask jobs
for _, task := range job.Tasks {
if task.NotebookTask != nil {
ic.Emit(&resource{
Resource: "databricks_notebook",
ID: task.NotebookTask.NotebookPath,
})
ic.emitNotebookOrRepo(task.NotebookTask.NotebookPath)
}
if task.PipelineTask != nil {
ic.Emit(&resource{
@@ -958,7 +955,21 @@
if name == "" {
return d.Id()
}
return strings.TrimPrefix(name, "/")
return nameNormalizationRegex.ReplaceAllString(name[7:], "_")
},
Search: func(ic *importContext, r *resource) error {
reposAPI := repos.NewReposAPI(ic.Context, ic.Client)
notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client)
repoDir, err := notebooksAPI.Read(r.Value)
if err != nil {
return err
}
repo, err := reposAPI.Read(fmt.Sprintf("%d", repoDir.ObjectID))
if err != nil {
return err
}
r.ID = fmt.Sprintf("%d", repo.ID)
return nil
},
List: func(ic *importContext) error {
repoList, err := repos.NewReposAPI(ic.Context, ic.Client).ListAll()
@@ -977,6 +988,7 @@
return nil
},
Import: func(ic *importContext, r *resource) error {
ic.emitUserOrServicePrincipalForPath(r.Data.Get("path").(string), "/Repos")
if ic.meAdmin {
ic.Emit(&resource{
Resource: "databricks_permissions",
@@ -986,6 +998,17 @@
}
return nil
},
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
if pathString == "path" {
return false
}
return defaultShouldOmitFieldFunc(ic, pathString, as, d)
},

Depends: []reference{
{Path: "path", Resource: "databricks_user", Match: "repos", MatchType: MatchPrefix},
{Path: "path", Resource: "databricks_service_principal", Match: "repos", MatchType: MatchPrefix},
},
},
"databricks_workspace_conf": {
Service: "workspace",
@@ -1094,6 +1117,7 @@
return nil
},
Import: func(ic *importContext, r *resource) error {
ic.emitUserOrServicePrincipalForPath(r.ID, "/Users")
notebooksAPI := workspace.NewNotebooksAPI(ic.Context, ic.Client)
contentB64, err := notebooksAPI.Export(r.ID, "SOURCE")
if err != nil {
@@ -1118,6 +1142,8 @@
},
Depends: []reference{
{Path: "source", File: true},
{Path: "path", Resource: "databricks_user", Match: "home", MatchType: MatchPrefix},
{Path: "path", Resource: "databricks_service_principal", Match: "home", MatchType: MatchPrefix},
},
},
"databricks_sql_query": {
@@ -1391,10 +1417,7 @@
common.DataToStructPointer(r.Data, s, &pipeline)
for _, lib := range pipeline.Libraries {
if lib.Notebook != nil {
ic.Emit(&resource{
Resource: "databricks_notebook",
ID: lib.Notebook.Path,
})
ic.emitNotebookOrRepo(lib.Notebook.Path)
}
ic.emitIfDbfsFile(lib.Jar)
ic.emitIfDbfsFile(lib.Whl)
@@ -1452,6 +1475,7 @@
{Path: "cluster.instance_pool_id", Resource: "databricks_instance_pool"},
{Path: "cluster.driver_instance_pool_id", Resource: "databricks_instance_pool"},
{Path: "library.notebook.path", Resource: "databricks_notebook"},
{Path: "library.notebook.path", Resource: "databricks_repo", Match: "path", MatchType: MatchPrefix},
{Path: "library.jar", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
{Path: "library.whl", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
},
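A related detail in the databricks_repo importable above is the rewritten Name function: it drops the leading "/Repos/" positionally (name[7:]) and collapses every non-word run with nameNormalizationRegex. A small standalone sketch of that transformation, using a hypothetical repo path:

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as nameNormalizationRegex in importables.go.
var nameNormalizationRegex = regexp.MustCompile(`\W+`)

func main() {
	path := "/Repos/user@domain.com/repo" // hypothetical repo path
	// Drop the 7-character "/Repos/" prefix, then replace non-word runs with "_".
	fmt.Println(nameNormalizationRegex.ReplaceAllString(path[7:], "_"))
	// Output: user_domain_com_repo
}
```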
34 changes: 29 additions & 5 deletions exporter/model.go
@@ -56,12 +56,36 @@ type importable struct {
ApiVersion common.ApiVersion
}

type MatchType string

const (
// MatchExact specifies that the whole attribute value must match
MatchExact = "exact"
// MatchPrefix specifies that the attribute value is a prefix of the referenced value
MatchPrefix = "prefix"
)

type reference struct {
Path string
Resource string
Match string
Variable bool
File bool
Path string
Resource string
Match string
MatchType MatchType // how to match: `prefix` embeds the reference into a string when the attribute value is a prefix of the referenced value; `` (or `exact`) requires a full match
Variable bool
File bool
}

func (r reference) MatchAttribute() string {
if r.Match != "" {
return r.Match
}
return "id"
}

func (r reference) MatchTypeValue() MatchType {
if r.MatchType == "" {
return MatchExact
}
return r.MatchType
}

type resource struct {
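The two accessors above keep existing reference declarations backwards compatible: an entry that sets neither Match nor MatchType still resolves to the id attribute with an exact comparison. A hypothetical test sketch of that defaulting (not part of this PR), assuming it sits in the exporter package next to model.go:

```go
package exporter

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestReferenceDefaults is an illustrative sketch, not a test from this change.
func TestReferenceDefaults(t *testing.T) {
	// No Match/MatchType: behaves as before this change.
	exact := reference{Path: "pipeline_task.pipeline_id", Resource: "databricks_pipeline"}
	assert.Equal(t, "id", exact.MatchAttribute())
	assert.Equal(t, MatchType(MatchExact), exact.MatchTypeValue())

	// Explicit prefix match on another attribute.
	prefix := reference{Path: "path", Resource: "databricks_user", Match: "home", MatchType: MatchPrefix}
	assert.Equal(t, "home", prefix.MatchAttribute())
	assert.Equal(t, MatchType(MatchPrefix), prefix.MatchTypeValue())
}
```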
5 changes: 5 additions & 0 deletions exporter/test-data/get-dlt-pipeline.json
@@ -40,6 +40,11 @@
"notebook": {
"path": "/Users/user@domain.com/Test DLT"
}
},
{
"notebook": {
"path": "/Repos/user@domain.com/repo/Test DLT"
}
}
],
"name": "Test DLT",