Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Migrate databricks_catalogs data to Go SDK #2038

Merged
merged 2 commits into from
Feb 22, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 5 additions & 7 deletions catalog/data_catalogs.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,22 +3,20 @@ package catalog
import (
"context"

"github.com/databricks/databricks-sdk-go"
"github.com/databricks/terraform-provider-databricks/common"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func DataSourceCatalogs() *schema.Resource {
type catalogsData struct {
return common.WorkspaceData(func(ctx context.Context, data *struct {
Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
}
return common.DataResource(catalogsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error {
data := e.(*catalogsData)
catalogsAPI := NewCatalogsAPI(ctx, c)
catalogs, err := catalogsAPI.list()
}, w *databricks.WorkspaceClient) error {
catalogs, err := w.Catalogs.ListAll(ctx)
if err != nil {
return err
}
for _, v := range catalogs.Catalogs {
for _, v := range catalogs {
data.Ids = append(data.Ids, v.Name)
}
return nil
Expand Down
4 changes: 3 additions & 1 deletion catalog/data_catalogs_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,9 @@ func TestCatalogsData(t *testing.T) {
Read: true,
NonWritable: true,
ID: "_",
}.ApplyNoError(t)
}.ApplyAndExpectData(t, map[string]any{
"ids": []string{"a", "b"},
})
}

func TestCatalogsData_Error(t *testing.T) {
Expand Down
5 changes: 0 additions & 5 deletions catalog/resource_catalog.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,6 @@ type Catalogs struct {
Catalogs []CatalogInfo `json:"catalogs"`
}

// list fetches every catalog in the workspace's metastore via the legacy
// hand-rolled Unity Catalog REST endpoint. This PR removes it in favor of
// the Go SDK call w.Catalogs.ListAll (see catalog/data_catalogs.go).
// On error, the returned Catalogs value is the zero value.
func (a CatalogsAPI) list() (catalogs Catalogs, err error) {
err = a.client.Get(a.context, "/unity-catalog/catalogs", nil, &catalogs)
return
}

// createCatalog creates a Unity Catalog catalog through the legacy REST
// endpoint. ci is passed as both the request and the response target —
// presumably the server response is unmarshalled back into ci so the caller
// sees server-populated fields; confirm against DatabricksClient.Post.
func (a CatalogsAPI) createCatalog(ci *CatalogInfo) error {
return a.client.Post(a.context, "/unity-catalog/catalogs", ci, ci)
}
Expand Down
51 changes: 51 additions & 0 deletions common/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"regexp"
"strings"

"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
Expand Down Expand Up @@ -182,6 +183,7 @@ func makeEmptyBlockSuppressFunc(name string) func(k, old, new string, d *schema.
}
}

// Deprecated: migrate to WorkspaceData
func DataResource(sc any, read func(context.Context, any, *DatabricksClient) error) *schema.Resource {
// TODO: migrate to go1.18 and get schema from second function argument?..
s := StructToSchema(sc, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
Expand Down Expand Up @@ -213,3 +215,52 @@ func DataResource(sc any, read func(context.Context, any, *DatabricksClient) err
},
}
}

// WorkspaceData is a generic way to define data resources in Terraform provider.
// It builds the Terraform schema from the struct type T via StructToSchema,
// invokes the supplied read callback with a Go SDK WorkspaceClient, and writes
// the populated struct back into Terraform state.
//
// Example usage:
//
// type catalogsData struct {
// Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
// }
// return common.WorkspaceData(func(ctx context.Context, data *catalogsData, w *databricks.WorkspaceClient) error {
// catalogs, err := w.Catalogs.ListAll(ctx)
// ...
// })
func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceClient) error) *schema.Resource {
// dummy's zero value is only used to derive the schema and the reflect.Type of T.
var dummy T
s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
return &schema.Resource{
Schema: s,
ReadContext: func(ctx context.Context, d *schema.ResourceData, m any) (diags diag.Diagnostics) {
// The named diags result lets the deferred recover() convert a panic in
// the helpers below into a diagnostic instead of crashing the provider.
defer func() {
// using recoverable() would cause more complex rewrapping of DataToStructPointer & StructToData
if panic := recover(); panic != nil {
diags = diag.Errorf("panic: %v", panic)
}
}()
// Allocate a fresh *T and hydrate it from the current Terraform state.
// NOTE(review): any error from DataToReflectValue appears to be ignored
// here — presumably the deferred recover is the intended safety net; confirm.
ptr := reflect.New(reflect.ValueOf(dummy).Type())
DataToReflectValue(d, &schema.Resource{Schema: s}, ptr.Elem())
client := m.(*DatabricksClient)
w, err := client.WorkspaceClient()
if err != nil {
err = nicerError(ctx, err, "read data")
return diag.FromErr(err)
}
err = read(ctx, ptr.Interface().(*T), w)
if err != nil {
// Record the error but deliberately fall through: whatever the
// callback managed to populate is still written to state below.
err = nicerError(ctx, err, "read data")
diags = diag.FromErr(err)
}
StructToData(ptr.Elem().Interface(), s, d)
// check if the resource schema has the `id` attribute (marked with `json:"id"` in the provided structure).
// and if yes, then use it as resource ID. If not, then use default value for resource ID (`_`)
if _, ok := s["id"]; ok {
d.SetId(d.Get("id").(string))
} else {
d.SetId("_")
}
return
},
}
}