Basic/Update tests
mbfrahry committed Apr 15, 2019
1 parent 47fc079 commit 6822cfa
Showing 5 changed files with 216 additions and 60 deletions.
6 changes: 1 addition & 5 deletions azurerm/config.go
@@ -226,14 +226,10 @@ type ArmClient struct {
sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient

// Data Factory
<<<<<<< HEAD
dataFactoryClient datafactory.FactoriesClient
dataFactoryPipelineClient datafactory.PipelinesClient
=======
dataFactoryPipelineClient datafactory.PipelinesClient
dataFactoryClient datafactory.FactoriesClient
dataFactoryDatasetClient datafactory.DatasetsClient
dataFactoryLinkedServiceClient datafactory.LinkedServicesClient
>>>>>>> master

// Data Lake Store
dataLakeStoreAccountClient storeAccount.AccountsClient
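The conflict markers above are part of the diff itself: this commit resolves the merge by deleting the duplicated `HEAD` block and keeping one consolidated list of Data Factory clients, with the pipelines client folded in. Reconstructed from the lines shown (field order and alignment are inferred, not copied), the resulting excerpt of `ArmClient` would read roughly:

```go
type ArmClient struct {
	// ...

	// Data Factory
	dataFactoryPipelineClient      datafactory.PipelinesClient
	dataFactoryClient              datafactory.FactoriesClient
	dataFactoryDatasetClient       datafactory.DatasetsClient
	dataFactoryLinkedServiceClient datafactory.LinkedServicesClient

	// Data Lake Store
	dataLakeStoreAccountClient storeAccount.AccountsClient

	// ...
}
```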
31 changes: 31 additions & 0 deletions azurerm/data_factory.go
@@ -52,3 +52,34 @@ func flattenDataFactoryAnnotations(input *[]interface{}) []string {
}
return annotations
}

func expandDataFactoryVariables(input map[string]interface{}) map[string]*datafactory.VariableSpecification {
output := make(map[string]*datafactory.VariableSpecification)

for k, v := range input {
output[k] = &datafactory.VariableSpecification{
Type: datafactory.VariableTypeString,
DefaultValue: v.(string),
}
}

return output
}

func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecification) map[string]interface{} {
output := make(map[string]interface{})

for k, v := range input {
if v != nil {
// we only support string parameters at this time
val, ok := v.DefaultValue.(string)
if !ok {
log.Printf("[DEBUG] Skipping variable %q since it's not a string", k)
continue
}

output[k] = val
}
}

return output
}
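For reference, a minimal self-contained sketch of the round trip these two helpers perform. The loop bodies mirror the code above; the `main` harness, the sample values, and the SDK import path are illustrative assumptions rather than part of the commit:

```go
package main

import (
	"fmt"

	// Assumed import path; use whichever datafactory SDK version the provider vendors.
	"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
)

func main() {
	// Shape of the data Terraform hands the provider for a `variables` map.
	raw := map[string]interface{}{
		"foo": "test1",
		"bar": "test2",
	}

	// Expand: every entry becomes a string-typed VariableSpecification whose
	// DefaultValue is the raw string (mirrors expandDataFactoryVariables).
	expanded := make(map[string]*datafactory.VariableSpecification)
	for k, v := range raw {
		expanded[k] = &datafactory.VariableSpecification{
			Type:         datafactory.VariableTypeString,
			DefaultValue: v.(string),
		}
	}

	// Flatten: only string defaults are copied back into Terraform state
	// (mirrors flattenDataFactoryVariables and its string-only restriction).
	flattened := make(map[string]interface{})
	for k, v := range expanded {
		if v == nil {
			continue
		}
		if s, ok := v.DefaultValue.(string); ok {
			flattened[k] = s
		}
	}

	fmt.Println(flattened) // map[bar:test2 foo:test1]
}
```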
85 changes: 42 additions & 43 deletions azurerm/resource_arm_data_factory_pipeline.go
@@ -42,14 +42,23 @@ func resourceArmDataFactoryPipeline() *schema.Resource {
Optional: true,
},

/*
"activity": {
Type: schema.TypeList,
Required: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{},
},
},*/
"variables": {
Type: schema.TypeMap,
Optional: true,
},

"description": {
Type: schema.TypeString,
Optional: true,
},

"annotations": {
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
},
}
}
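The `annotations` list added to the schema above is sent to the SDK as a `*[]interface{}` and flattened back into state as `[]string` by `flattenDataFactoryAnnotations`, whose body is collapsed out of this diff. A rough self-contained sketch of that round trip follows; the flatten loop is an approximation by assumption, not a copy of the provider function:

```go
package main

import "fmt"

// Approximation of flattenDataFactoryAnnotations: the SDK models annotations
// as *[]interface{}, while Terraform state stores them as a list of strings.
func flattenAnnotations(input *[]interface{}) []string {
	annotations := make([]string, 0)
	if input == nil {
		return annotations
	}
	for _, annotation := range *input {
		if v, ok := annotation.(string); ok {
			annotations = append(annotations, v)
		}
	}
	return annotations
}

func main() {
	// d.Get("annotations") for `annotations = ["test1", "test2"]` yields []interface{}.
	raw := []interface{}{"test1", "test2"}

	// CreateUpdate hands a pointer to that slice straight to the SDK
	// (pipeline.Annotations = &annotations in the resource code below).
	sdkValue := &raw

	fmt.Println(flattenAnnotations(sdkValue)) // [test1 test2]
}
```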
@@ -77,10 +86,16 @@ func resourceArmDataFactoryPipelineCreateUpdate(d *schema.ResourceData, meta int
}
}

parameters := expandDataFactoryPipelineParameters(d.Get("parameters").(map[string]interface{}))

description := d.Get("description").(string)
pipeline := &datafactory.Pipeline{
Parameters: parameters,
Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})),
Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})),
Description: &description,
}

if v, ok := d.GetOk("annotations"); ok {
annotations := v.([]interface{})
pipeline.Annotations = &annotations
}

config := datafactory.PipelineResource{
@@ -133,10 +148,25 @@ func resourceArmDataFactoryPipelineRead(d *schema.ResourceData, meta interface{}
d.Set("data_factory_name", dataFactoryName)

if props := resp.Pipeline; props != nil {
parameters := flattenDataFactoryPipelineParameters(props.Parameters)
if props.Description != nil {
d.Set("description", props.Description)
}

parameters := flattenDataFactoryParameters(props.Parameters)
if err := d.Set("parameters", parameters); err != nil {
return fmt.Errorf("Error setting `parameters`: %+v", err)
}

annotations := flattenDataFactoryAnnotations(props.Annotations)
if err := d.Set("annotations", annotations); err != nil {
return fmt.Errorf("Error setting `annotations`: %+v", err)
}

variables := flattenDataFactoryVariables(props.Variables)
if err := d.Set("variables", variables); err != nil {
return fmt.Errorf("Error setting `variables`: %+v", err)
}

}

return nil
@@ -170,34 +200,3 @@ func validateAzureRMDataFactoryPipelineName(v interface{}, k string) (warnings [

return warnings, errors
}

func expandDataFactoryPipelineParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification {
output := make(map[string]*datafactory.ParameterSpecification)

for k, v := range input {
output[k] = &datafactory.ParameterSpecification{
Type: datafactory.ParameterTypeString,
DefaultValue: v.(string),
}
}

return output
}

func flattenDataFactoryPipelineParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} {
output := make(map[string]interface{})

for k, v := range input {
if v != nil {
// we only support string parameters at this time
val, ok := v.DefaultValue.(string)
if !ok {
log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k)
}

output[k] = val
}
}

return output
}
90 changes: 78 additions & 12 deletions azurerm/resource_arm_data_factory_pipeline_test.go
@@ -14,7 +14,32 @@ func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) {
resourceName := "azurerm_data_factory_pipeline.test"
ri := tf.AccRandTimeInt()
config := testAccAzureRMDataFactoryPipeline_basic(ri, testLocation())
config2 := testAccAzureRMDataFactoryPipeline_update(ri, testLocation())

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testCheckAzureRMDataFactoryPipelineDestroy,
Steps: []resource.TestStep{
{
Config: config,
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryPipelineExists(resourceName),
),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
},
},
})
}

func TestAccAzureRMDataFactoryPipeline_update(t *testing.T) {
resourceName := "azurerm_data_factory_pipeline.test"
ri := tf.AccRandTimeInt()
config := testAccAzureRMDataFactoryPipeline_update1(ri, testLocation())
config2 := testAccAzureRMDataFactoryPipeline_update2(ri, testLocation())

resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
@@ -26,16 +51,19 @@ func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) {
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryPipelineExists(resourceName),
resource.TestCheckResourceAttr(resourceName, "parameters.%", "1"),
resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"),
resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"),
resource.TestCheckResourceAttr(resourceName, "description", "test description"),
resource.TestCheckResourceAttr(resourceName, "variables.%", "2"),
),
},
{
Config: config2,
Check: resource.ComposeTestCheckFunc(
testCheckAzureRMDataFactoryPipelineExists(resourceName),
resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"),
resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"),
resource.TestCheckResourceAttr(resourceName, "parameters.test2", "testparameter2"),
resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"),
resource.TestCheckResourceAttr(resourceName, "description", "test description2"),
resource.TestCheckResourceAttr(resourceName, "variables.%", "3"),
),
},
{
@@ -104,7 +132,7 @@ resource "azurerm_resource_group" "test" {
location = "%s"
}
resource "azurerm_data_factory_v2" "test" {
resource "azurerm_data_factory" "test" {
name = "acctestdfv2%d"
location = "${azurerm_resource_group.test.location}"
resource_group_name = "${azurerm_resource_group.test.name}"
@@ -113,23 +141,52 @@ resource "azurerm_data_factory_v2" "test" {
resource "azurerm_data_factory_pipeline" "test" {
name = "acctest%d"
resource_group_name = "${azurerm_resource_group.test.name}"
data_factory_name = "${azurerm_data_factory_v2.test.name}"
data_factory_name = "${azurerm_data_factory.test.name}"
}
`, rInt, location, rInt, rInt)
}

func testAccAzureRMDataFactoryPipeline_update1(rInt int, location string) string {
return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
name = "acctestrg-%d"
location = "%s"
}
resource "azurerm_data_factory" "test" {
name = "acctestdfv2%d"
location = "${azurerm_resource_group.test.location}"
resource_group_name = "${azurerm_resource_group.test.name}"
}
resource "azurerm_data_factory_pipeline" "test" {
name = "acctest%d"
resource_group_name = "${azurerm_resource_group.test.name}"
data_factory_name = "${azurerm_data_factory.test.name}"
annotations = ["test1", "test2", "test3"]
description = "test description"
parameters = {
test = "testparameter"
}
variables {
"foo" = "test1"
"bar" = "test2"
}
}
`, rInt, location, rInt, rInt)
}

func testAccAzureRMDataFactoryPipeline_update(rInt int, location string) string {
func testAccAzureRMDataFactoryPipeline_update2(rInt int, location string) string {
return fmt.Sprintf(`
resource "azurerm_resource_group" "test" {
name = "acctestrg-%d"
location = "%s"
}
resource "azurerm_data_factory_v2" "test" {
resource "azurerm_data_factory" "test" {
name = "acctestdfv2%d"
location = "${azurerm_resource_group.test.location}"
resource_group_name = "${azurerm_resource_group.test.name}"
@@ -138,11 +195,20 @@ resource "azurerm_data_factory_v2" "test" {
resource "azurerm_data_factory_pipeline" "test" {
name = "acctest%d"
resource_group_name = "${azurerm_resource_group.test.name}"
data_factory_name = "${azurerm_data_factory_v2.test.name}"
data_factory_name = "${azurerm_data_factory.test.name}"
annotations = ["test1", "test2"]
description = "test description2"
parameters = {
test = "testparameter"
test2 = "testparameter2"
}
variables {
"foo" = "test1"
"bar" = "test2"
"baz" = "test3"
}
}
`, rInt, location, rInt, rInt)
64 changes: 64 additions & 0 deletions website/docs/r/data_factory_pipeline.html.markdown
@@ -0,0 +1,64 @@
---
layout: "azurerm"
page_title: "Azure Resource Manager: azurerm_data_factory_pipeline"
sidebar_current: "docs-azurerm-resource-data-factory-pipeline"
description: |-
  Manages a Pipeline inside an Azure Data Factory.
---

# azurerm_data_factory_pipeline

Manages a Pipeline inside an Azure Data Factory.

## Example Usage

```hcl
resource "azurerm_resource_group" "example" {
name = "example"
location = "northeurope"
}
resource "azurerm_data_factory" "example" {
name = "example"
location = "${azurerm_resource_group.example.location}"
resource_group_name = "${azurerm_resource_group.example.name}"
}
resource "azurerm_data_factory_pipeline" "example" {
name = "example"
resource_group_name = "${azurerm_resource_group.example.name}"
data_factory_name = "${azurerm_data_factory.example.name}"
}
```

## Argument Reference

The following arguments are supported:

* `name` - (Required) Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be unique within the Data Factory. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions.

* `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Pipeline. Changing this forces a new resource to be created.

* `data_factory_name` - (Required) The name of the Data Factory the Pipeline will be associated with. Changing this forces a new resource to be created.

* `description` - (Optional) The description for the Data Factory Pipeline.

* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Pipeline.

* `parameters` - (Optional) A map of parameters to associate with the Data Factory Pipeline.

* `variables` - (Optional) A map of variables to associate with the Data Factory Pipeline.

## Attributes Reference

The following attributes are exported:

* `id` - The ID of the Data Factory Pipeline.

## Import

Data Factory Pipeline can be imported using the `resource id`, e.g.

```shell
terraform import azurerm_data_factory_pipeline.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/pipelines/example
```
