diff --git a/azurerm/config.go b/azurerm/config.go
index 53b12d57e913..d669b68bc3d9 100644
--- a/azurerm/config.go
+++ b/azurerm/config.go
@@ -226,14 +226,10 @@ type ArmClient struct {
 	sqlVirtualNetworkRulesClient sql.VirtualNetworkRulesClient
 
 	// Data Factory
-<<<<<<< HEAD
-	dataFactoryClient         datafactory.FactoriesClient
-	dataFactoryPipelineClient datafactory.PipelinesClient
-=======
+	dataFactoryPipelineClient      datafactory.PipelinesClient
 	dataFactoryClient              datafactory.FactoriesClient
 	dataFactoryDatasetClient       datafactory.DatasetsClient
 	dataFactoryLinkedServiceClient datafactory.LinkedServicesClient
->>>>>>> master
 
 	// Data Lake Store
 	dataLakeStoreAccountClient storeAccount.AccountsClient
diff --git a/azurerm/data_factory.go b/azurerm/data_factory.go
index 48db9dbf683c..5a2de0759b00 100644
--- a/azurerm/data_factory.go
+++ b/azurerm/data_factory.go
@@ -52,3 +52,35 @@ func flattenDataFactoryAnnotations(input *[]interface{}) []string {
 	}
 	return annotations
 }
+
+func expandDataFactoryVariables(input map[string]interface{}) map[string]*datafactory.VariableSpecification {
+	output := make(map[string]*datafactory.VariableSpecification)
+
+	for k, v := range input {
+		output[k] = &datafactory.VariableSpecification{
+			Type:         datafactory.VariableTypeString,
+			DefaultValue: v.(string),
+		}
+	}
+
+	return output
+}
+
+func flattenDataFactoryVariables(input map[string]*datafactory.VariableSpecification) map[string]interface{} {
+	output := make(map[string]interface{})
+
+	for k, v := range input {
+		if v != nil {
+			// we only support string variables at this time
+			val, ok := v.DefaultValue.(string)
+			if !ok {
+				log.Printf("[DEBUG] Skipping variable %q since it's not a string", k)
+				continue
+			}
+
+			output[k] = val
+		}
+	}
+
+	return output
+}
diff --git a/azurerm/resource_arm_data_factory_pipeline.go b/azurerm/resource_arm_data_factory_pipeline.go
index 2e82228eeae3..f176ecfa7737 100644
--- a/azurerm/resource_arm_data_factory_pipeline.go
+++ b/azurerm/resource_arm_data_factory_pipeline.go
@@ -42,14 +42,23 @@ func resourceArmDataFactoryPipeline() *schema.Resource { Optional: true, }, - /* - "activity": { - Type: schema.TypeList, - Required: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{}, - }, - },*/ + "variables": { + Type: schema.TypeMap, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, }, } } @@ -77,10 +86,16 @@ func resourceArmDataFactoryPipelineCreateUpdate(d *schema.ResourceData, meta int } } - parameters := expandDataFactoryPipelineParameters(d.Get("parameters").(map[string]interface{})) - + description := d.Get("description").(string) pipeline := &datafactory.Pipeline{ - Parameters: parameters, + Parameters: expandDataFactoryParameters(d.Get("parameters").(map[string]interface{})), + Variables: expandDataFactoryVariables(d.Get("variables").(map[string]interface{})), + Description: &description, + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + pipeline.Annotations = &annotations } config := datafactory.PipelineResource{ @@ -133,10 +148,25 @@ func resourceArmDataFactoryPipelineRead(d *schema.ResourceData, meta interface{} d.Set("data_factory_name", dataFactoryName) if props := resp.Pipeline; props != nil { - parameters := flattenDataFactoryPipelineParameters(props.Parameters) + if props.Description != nil { + d.Set("description", props.Description) + } + + parameters := flattenDataFactoryParameters(props.Parameters) if err := d.Set("parameters", parameters); err != nil { return fmt.Errorf("Error setting `parameters`: %+v", err) } + + annotations := flattenDataFactoryAnnotations(props.Annotations) + if err := d.Set("annotations", annotations); err != nil { + return fmt.Errorf("Error setting `annotations`: %+v", err) + } + + variables := flattenDataFactoryVariables(props.Variables) + if err := d.Set("variables", 
variables); err != nil { + return fmt.Errorf("Error setting `variables`: %+v", err) + } + } return nil @@ -170,34 +200,3 @@ func validateAzureRMDataFactoryPipelineName(v interface{}, k string) (warnings [ return warnings, errors } - -func expandDataFactoryPipelineParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification { - output := make(map[string]*datafactory.ParameterSpecification) - - for k, v := range input { - output[k] = &datafactory.ParameterSpecification{ - Type: datafactory.ParameterTypeString, - DefaultValue: v.(string), - } - } - - return output -} - -func flattenDataFactoryPipelineParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} { - output := make(map[string]interface{}) - - for k, v := range input { - if v != nil { - // we only support string parameters at this time - val, ok := v.DefaultValue.(string) - if !ok { - log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k) - } - - output[k] = val - } - } - - return output -} diff --git a/azurerm/resource_arm_data_factory_pipeline_test.go b/azurerm/resource_arm_data_factory_pipeline_test.go index 65a2724d406a..af24a9527005 100644 --- a/azurerm/resource_arm_data_factory_pipeline_test.go +++ b/azurerm/resource_arm_data_factory_pipeline_test.go @@ -14,7 +14,32 @@ func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) { resourceName := "azurerm_data_factory_pipeline.test" ri := tf.AccRandTimeInt() config := testAccAzureRMDataFactoryPipeline_basic(ri, testLocation()) - config2 := testAccAzureRMDataFactoryPipeline_update(ri, testLocation()) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataFactoryPipelineDestroy, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryPipelineExists(resourceName), + ), + }, + { + ResourceName: resourceName, + 
ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataFactoryPipeline_update(t *testing.T) { + resourceName := "azurerm_data_factory_pipeline.test" + ri := tf.AccRandTimeInt() + config := testAccAzureRMDataFactoryPipeline_update1(ri, testLocation()) + config2 := testAccAzureRMDataFactoryPipeline_update2(ri, testLocation()) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, @@ -26,7 +51,9 @@ func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) { Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataFactoryPipelineExists(resourceName), resource.TestCheckResourceAttr(resourceName, "parameters.%", "1"), - resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(resourceName, "description", "test description"), + resource.TestCheckResourceAttr(resourceName, "variables.%", "2"), ), }, { @@ -34,8 +61,9 @@ func TestAccAzureRMDataFactoryPipeline_basic(t *testing.T) { Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataFactoryPipelineExists(resourceName), resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"), - resource.TestCheckResourceAttr(resourceName, "parameters.test", "testparameter"), - resource.TestCheckResourceAttr(resourceName, "parameters.test2", "testparameter2"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"), + resource.TestCheckResourceAttr(resourceName, "description", "test description2"), + resource.TestCheckResourceAttr(resourceName, "variables.%", "3"), ), }, { @@ -104,7 +132,7 @@ resource "azurerm_resource_group" "test" { location = "%s" } -resource "azurerm_data_factory_v2" "test" { +resource "azurerm_data_factory" "test" { name = "acctestdfv2%d" location = "${azurerm_resource_group.test.location}" resource_group_name = "${azurerm_resource_group.test.name}" @@ -113,23 +141,52 @@ resource 
"azurerm_data_factory_v2" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = "${azurerm_resource_group.test.name}" - data_factory_name = "${azurerm_data_factory_v2.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" +} +`, rInt, location, rInt, rInt) +} + +func testAccAzureRMDataFactoryPipeline_update1(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdfv2%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_pipeline" "test" { + name = "acctest%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + + annotations = ["test1", "test2", "test3"] + description = "test description" parameters = { - test = "testparameter" + test = "testparameter" + } + + variables { + "foo" = "test1" + "bar" = "test2" } } `, rInt, location, rInt, rInt) } -func testAccAzureRMDataFactoryPipeline_update(rInt int, location string) string { +func testAccAzureRMDataFactoryPipeline_update2(rInt int, location string) string { return fmt.Sprintf(` resource "azurerm_resource_group" "test" { name = "acctestrg-%d" location = "%s" } -resource "azurerm_data_factory_v2" "test" { +resource "azurerm_data_factory" "test" { name = "acctestdfv2%d" location = "${azurerm_resource_group.test.location}" resource_group_name = "${azurerm_resource_group.test.name}" @@ -138,11 +195,20 @@ resource "azurerm_data_factory_v2" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = "${azurerm_resource_group.test.name}" - data_factory_name = "${azurerm_data_factory_v2.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + annotations = ["test1", "test2"] + description = 
"test description2" + parameters = { - test = "testparameter" - test2 = "testparameter2" + test = "testparameter" + test2 = "testparameter2" + } + + variables { + "foo" = "test1" + "bar" = "test2" + "baz" = "test3" } } `, rInt, location, rInt, rInt) diff --git a/website/docs/r/data_factory_pipeline.html.markdown b/website/docs/r/data_factory_pipeline.html.markdown new file mode 100644 index 000000000000..832141075bbf --- /dev/null +++ b/website/docs/r/data_factory_pipeline.html.markdown @@ -0,0 +1,64 @@ +--- +layout: "azurerm" +page_title: "Azure Resource Manager: azurerm_data_factory_pipeline" +sidebar_current: "docs-azurerm-resource-data-factory-pipeline" +description: |- + Manage a Pipeline inside a Azure Data Factory. +--- + +# azurerm_data_factory_pipeline + +Manage a Pipeline inside a Azure Data Factory. + +## Example Usage + +```hcl +resource "azurerm_resource_group" "example" { + name = "example" + location = "northeurope" +} + +resource "azurerm_data_factory" "example" { + name = "example" + location = "${azurerm_resource_group.example.location}" + resource_group_name = "${azurerm_resource_group.example.name}" +} + +resource "azurerm_data_factory_pipeline" "example" { + name = "example" + resource_group_name = "${azurerm_resource_group.example.name}" + data_factory_name = "${azurerm_data_factory.example.name}" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Pipeline. Changing this forces a new resource + +* `data_factory_name` - (Required) The Data Factory name in which to associate the Pipeline with. Changing this forces a new resource. 
+ +* `description` - (Optional) The description for the Data Factory Pipeline. + +* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Pipeline. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory Pipeline. + +* `variables` - (Optional) A map of variables to associate with the Data Factory Pipeline. + +## Attributes Reference + +The following attributes are exported: + +* `id` - The ID of the Data Factory Pipeline. + +## Import + +Data Factory Pipeline can be imported using the `resource id`, e.g. + +```shell +terraform import azurerm_data_factory_pipeline.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/pipelines/example +```