Skip to content

Commit

Permalink
Added databricks_recipient resource for Delta Sharing (databricks#1571
Browse files Browse the repository at this point in the history
)
  • Loading branch information
stikkireddy authored Sep 2, 2022
1 parent 10cb62d commit c315d74
Show file tree
Hide file tree
Showing 5 changed files with 319 additions and 0 deletions.
50 changes: 50 additions & 0 deletions catalog/acceptance/mws_recipient_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package acceptance

import (
"testing"

"github.com/databricks/terraform-provider-databricks/internal/acceptance"
"github.com/databricks/terraform-provider-databricks/qa"
)

// TestAccCreateRecipientDb2Open verifies that an open-sharing recipient
// (authentication_type = TOKEN) can be created end-to-end against the
// "aws-uc-prod" acceptance environment.
func TestAccCreateRecipientDb2Open(t *testing.T) {
	// Skip unless running in the Unity Catalog AWS production test env.
	qa.RequireCloudEnv(t, "aws-uc-prod")
	acceptance.Test(t, []acceptance.Step{
		{
			// {var.RANDOM} is substituted by the acceptance harness to keep
			// resource names unique across concurrent runs.
			Template: `
resource "databricks_recipient" "db2open" {
name = "{var.RANDOM}-terraform-db2open-recipient"
comment = "made by terraform"
authentication_type = "TOKEN"
sharing_code = "{var.RANDOM}"
ip_access_list {
allowed_ip_addresses = ["10.0.0.0/16"] // using private ip for acc testing
}
}`,
		},
	})
}

// TestAccCreateRecipientDb2DbAws verifies Databricks-to-Databricks sharing:
// a recipient with authentication_type = DATABRICKS pointing at the global
// metastore ID of a freshly created INTERNAL-scope metastore.
func TestAccCreateRecipientDb2DbAws(t *testing.T) {
	// Skip unless running in the Unity Catalog AWS production test env.
	qa.RequireCloudEnv(t, "aws-uc-prod")
	acceptance.Test(t, []acceptance.Step{
		{
			Template: `
resource "databricks_metastore" "recipient_metastore" {
name = "{var.RANDOM}-terraform-recipient-metastore"
storage_root = format("s3a://%s/%s", "{var.RANDOM}", "{var.RANDOM}")
delta_sharing_scope = "INTERNAL"
delta_sharing_recipient_token_lifetime_in_seconds = "60000"
force_destroy = true
lifecycle { ignore_changes = [storage_root] } // fake storage root is causing issues
}
resource "databricks_recipient" "db2db" {
name = "{var.RANDOM}-terraform-db2db-recipient"
comment = "made by terraform"
authentication_type = "DATABRICKS"
data_recipient_global_metastore_id = databricks_metastore.recipient_metastore.global_metastore_id
}`,
		},
	})
}
98 changes: 98 additions & 0 deletions catalog/resource_recipient.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
package catalog

import (
"context"
"github.com/databricks/terraform-provider-databricks/common"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// RecipientsAPI is a thin client for the Unity Catalog recipients endpoints,
// bundling the Databricks client with the request context.
type RecipientsAPI struct {
	client  *common.DatabricksClient
	context context.Context
}

// NewRecipientsAPI wraps the given Databricks client (passed as the generic
// provider meta value) and request context into a RecipientsAPI helper.
func NewRecipientsAPI(ctx context.Context, m interface{}) RecipientsAPI {
	client := m.(*common.DatabricksClient)
	return RecipientsAPI{
		client:  client,
		context: ctx,
	}
}

// Token describes an activation token issued for a TOKEN-authenticated
// recipient. All fields are server-generated, hence tf:"computed".
type Token struct {
	// Unique identifier of the token.
	Id string `json:"id,omitempty" tf:"computed"`
	// Creation timestamp (epoch value; presumably milliseconds — TODO confirm units).
	CreatedAt int64 `json:"created_at,omitempty" tf:"computed"`
	// Principal that created the token.
	CreatedBy string `json:"created_by,omitempty" tf:"computed"`
	// One-time URL for the recipient to download their credentials file.
	ActivationUrl string `json:"activation_url,omitempty" tf:"computed"`
	// Expiration timestamp (same epoch units as CreatedAt).
	ExpirationTime int64 `json:"expiration_time,omitempty" tf:"computed"`
	// Last-update timestamp.
	UpdatedAt int64 `json:"updated_at,omitempty" tf:"computed"`
	// Principal that last updated the token.
	UpdatedBy string `json:"updated_by,omitempty" tf:"computed"`
}

// IpAccessList restricts where a recipient token may be used from.
type IpAccessList struct {
	// Allowed IP addresses in CIDR notation.
	AllowedIpAddresses []string `json:"allowed_ip_addresses"`
}

// RecipientInfo is the wire and Terraform-schema representation of a Delta
// Sharing recipient. tf:"force_new" fields cannot be updated in place.
type RecipientInfo struct {
	// Recipient name; also used as the Terraform resource ID.
	Name string `json:"name" tf:"force_new"`
	// Free-form description.
	Comment string `json:"comment,omitempty"`
	// One-time sharing code provided by the data recipient; sensitive and
	// diff-suppressed because the server never echoes it back.
	SharingCode string `json:"sharing_code,omitempty" tf:"sensitive,force_new,suppress_diff"`
	// Either "TOKEN" (open sharing) or "DATABRICKS" (validated in ResourceRecipient).
	AuthenticationType string `json:"authentication_type" tf:"force_new"`
	// Server-generated activation tokens.
	Tokens []Token `json:"tokens,omitempty" tf:"computed"`
	// Target metastore for Databricks-to-Databricks sharing; mutually
	// exclusive with ip_access_list.
	DataRecipientGlobalMetastoreId string `json:"data_recipient_global_metastore_id,omitempty" tf:"force_new,conflicts:ip_access_list"`
	// Optional IP restriction for token usage.
	IpAccessList *IpAccessList `json:"ip_access_list,omitempty"`
}

// Recipients is the list-response envelope returned by the recipients API.
type Recipients struct {
	Recipients []RecipientInfo `json:"recipients"`
}

// createRecipient issues a POST to the Unity Catalog recipients endpoint.
// The server response is decoded back into ci, populating computed fields
// such as tokens.
func (a RecipientsAPI) createRecipient(ci *RecipientInfo) error {
	err := a.client.Post(a.context, "/unity-catalog/recipients", ci, ci)
	return err
}

// getRecipient fetches a single recipient by name.
func (a RecipientsAPI) getRecipient(name string) (RecipientInfo, error) {
	var info RecipientInfo
	err := a.client.Get(a.context, "/unity-catalog/recipients/"+name, nil, &info)
	return info, err
}

// deleteRecipient removes the recipient with the given name.
func (a RecipientsAPI) deleteRecipient(name string) error {
	endpoint := "/unity-catalog/recipients/" + name
	return a.client.Delete(a.context, endpoint, nil)
}

// updateRecipient patches the mutable fields of an existing recipient.
// Only comment and ip_access_list can change; every other field is
// force_new in the schema and triggers recreation instead.
func (a RecipientsAPI) updateRecipient(ci *RecipientInfo) error {
	patch := map[string]interface{}{
		"comment":        ci.Comment,
		"ip_access_list": ci.IpAccessList,
	}
	return a.client.Patch(a.context, "/unity-catalog/recipients/"+ci.Name, patch)
}

// ResourceRecipient defines the databricks_recipient Terraform resource,
// wiring CRUD callbacks onto the Unity Catalog recipients API.
func ResourceRecipient() *schema.Resource {
	s := common.StructToSchema(RecipientInfo{},
		func(m map[string]*schema.Schema) map[string]*schema.Schema {
			// The API accepts exactly these two authentication modes.
			m["authentication_type"].ValidateFunc = validation.StringInSlice(
				[]string{"TOKEN", "DATABRICKS"}, false)
			return m
		})
	return common.Resource{
		Schema: s,
		Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			var info RecipientInfo
			common.DataToStructPointer(d, s, &info)
			err := NewRecipientsAPI(ctx, c).createRecipient(&info)
			if err != nil {
				return err
			}
			// Recipient names are unique, so the name doubles as the resource ID.
			d.SetId(info.Name)
			return nil
		},
		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			info, err := NewRecipientsAPI(ctx, c).getRecipient(d.Id())
			if err != nil {
				return err
			}
			return common.StructToData(info, s, d)
		},
		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			var info RecipientInfo
			common.DataToStructPointer(d, s, &info)
			return NewRecipientsAPI(ctx, c).updateRecipient(&info)
		},
		Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			return NewRecipientsAPI(ctx, c).deleteRecipient(d.Id())
		},
	}.ToResource()
}
80 changes: 80 additions & 0 deletions catalog/resource_recipient_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
package catalog

import (
"testing"

"github.com/databricks/terraform-provider-databricks/qa"
)

// TestRecipientCornerCases runs the shared qa corner-case suite (missing ID,
// HTTP errors, etc.) against the recipient resource.
func TestRecipientCornerCases(t *testing.T) {
	qa.ResourceCornerCases(t, ResourceRecipient())
}

// TestCreateRecipient exercises the happy-path create flow: the fixture
// asserts the exact POST body sent to the API, then serves the follow-up
// GET the framework issues to refresh state after creation.
func TestCreateRecipient(t *testing.T) {
	qa.ResourceFixture{
		Fixtures: []qa.HTTPFixture{
			{
				Method:   "POST",
				Resource: "/api/2.0/unity-catalog/recipients",
				// The request must match this struct exactly — it mirrors the HCL below.
				ExpectedRequest: RecipientInfo{
					Name:               "a",
					Comment:            "b",
					SharingCode:        "c",
					AuthenticationType: "TOKEN",
					Tokens:             nil,
					IpAccessList: &IpAccessList{
						AllowedIpAddresses: []string{"0.0.0.0/0"},
					},
				},
				Response: RecipientInfo{
					Name: "a",
				},
			},
			{
				// Post-create read; resource ID is the recipient name "a".
				Method:   "GET",
				Resource: "/api/2.0/unity-catalog/recipients/a",
				Response: RecipientInfo{
					Name:               "a",
					Comment:            "b",
					SharingCode:        "c",
					AuthenticationType: "TOKEN",
					Tokens:             nil,
					IpAccessList: &IpAccessList{
						AllowedIpAddresses: []string{"0.0.0.0/0"},
					},
				},
			},
		},
		Resource: ResourceRecipient(),
		Create:   true,
		HCL: `
name = "a"
comment = "b"
authentication_type = "TOKEN"
sharing_code = "c"
ip_access_list {
allowed_ip_addresses = ["0.0.0.0/0"]
}
`,
	}.ApplyNoError(t)
}

// TestCreateRecipient_InvalidAuthType verifies that the StringInSlice
// validator rejects an unsupported authentication_type before any HTTP
// request is made (hence the empty fixture list).
func TestCreateRecipient_InvalidAuthType(t *testing.T) {
	qa.ResourceFixture{
		Fixtures: []qa.HTTPFixture{},
		Resource: ResourceRecipient(),
		Create:   true,
		HCL: `
name = "a"
comment = "b"
authentication_type = "temp"
sharing_code = "c"
ip_access_list {
allowed_ip_addresses = ["0.0.0.0/0"]
}
`,
	}.ExpectError(t, "invalid config supplied. "+
		"[authentication_type] expected authentication_type "+
		"to be one of [TOKEN DATABRICKS], got temp")

}
90 changes: 90 additions & 0 deletions docs/resources/recipient.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
---
subcategory: "Unity Catalog"
---
# databricks_recipient Resource

Within a metastore, Unity Catalog provides the ability to create a recipient to attach delta shares to.

A `databricks_recipient` is contained within [databricks_metastore](metastore.md) and can contain a list of shares.

## Example Usage

### Databricks sharing with a non-Databricks recipient

Setting `authentication_type` to `TOKEN` creates a temporary URL from which a credentials file can be downloaded. The credentials file is used to authenticate to the sharing server to access data. Use this mode when the recipient is not using Databricks.

```hcl
resource "random_password" "db2opensharecode" {
length = 16
special = true
}
data "databricks_current_user" "current" {}
resource "databricks_recipient" "db2open" {
name = "${data.databricks_current_user.current.alphanumeric}-recipient"
comment = "made by terraform"
authentication_type = "TOKEN"
sharing_code = random_password.db2opensharecode.result
ip_access_list {
allowed_ip_addresses = [...] // .. fill in allowed IPv4 addresses (CIDR notation allowed)
}
}
```

### Databricks to Databricks Sharing

Setting `authentication_type` to `DATABRICKS` allows you to automatically create a provider for a recipient who
is using Databricks. To do this, the recipient needs to provide the global metastore ID that you will be sharing with. The
global metastore ID follows the format: `<cloud>:<region>:<guid>`

```hcl
data "databricks_current_user" "current" {}
resource "databricks_metastore" "recipient_metastore" {
name = "recipient"
storage_root = format("abfss://%s@%s.dfs.core.windows.net/",
azurerm_storage_account.unity_catalog.name,
azurerm_storage_container.unity_catalog.name)
delta_sharing_scope = "INTERNAL"
delta_sharing_recipient_token_lifetime_in_seconds = "60000000"
force_destroy = true
}
resource "databricks_recipient" "db2db" {
name = "${data.databricks_current_user.current.alphanumeric}-recipient"
comment = "made by terraform"
authentication_type = "DATABRICKS"
data_recipient_global_metastore_id = databricks_metastore.recipient_metastore.global_metastore_id
}
```

## Argument Reference

The following arguments are supported:

* `name` - Name of recipient. Change forces creation of a new resource.
* `comment` - (Optional) Description about the recipient.
* `sharing_code` - (Optional) The one-time sharing code provided by the data recipient.
* `authentication_type` - (Optional) The delta sharing authentication type. Valid values are `TOKEN` and `DATABRICKS`.
* `data_recipient_global_metastore_id` - Required when authentication_type is DATABRICKS.
* `ip_access_list` - (Optional) The IP access list block restricting where recipient tokens may be used from. Conflicts with `data_recipient_global_metastore_id`.

### Ip Access List Argument
Only one `ip_access_list` block is allowed in a recipient. It conflicts with authentication type `DATABRICKS`.

```hcl
ip_access_list {
allowed_ip_addresses = ["0.0.0.0/0"]
}
```

Arguments for the `ip_access_list` block are:

Exactly one of the below arguments is required:
* `allowed_ip_addresses` - Allowed IP Addresses in CIDR notation. Limit of 100.

## Attribute Reference:

* `tokens` - (Optional) List of Recipient Tokens.
1 change: 1 addition & 0 deletions provider/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ func DatabricksProvider() *schema.Provider {
"databricks_permission_assignment": access.ResourcePermissionAssignment(),
"databricks_permissions": permissions.ResourcePermissions(),
"databricks_pipeline": pipelines.ResourcePipeline(),
"databricks_recipient": catalog.ResourceRecipient(),
"databricks_repo": repos.ResourceRepo(),
"databricks_schema": catalog.ResourceSchema(),
"databricks_secret": secrets.ResourceSecret(),
Expand Down

0 comments on commit c315d74

Please sign in to comment.