diff --git a/azurerm/provider.go b/azurerm/provider.go index 050f360c21a8..63c11927be1d 100644 --- a/azurerm/provider.go +++ b/azurerm/provider.go @@ -419,6 +419,7 @@ func Provider() terraform.ResourceProvider { "azurerm_stream_analytics_job": resourceArmStreamAnalyticsJob(), "azurerm_stream_analytics_function_javascript_udf": resourceArmStreamAnalyticsFunctionUDF(), "azurerm_stream_analytics_output_blob": resourceArmStreamAnalyticsOutputBlob(), + "azurerm_stream_analytics_output_mssql": resourceArmStreamAnalyticsOutputSql(), "azurerm_stream_analytics_output_eventhub": resourceArmStreamAnalyticsOutputEventHub(), "azurerm_stream_analytics_output_servicebus_queue": resourceArmStreamAnalyticsOutputServiceBusQueue(), "azurerm_stream_analytics_stream_input_blob": resourceArmStreamAnalyticsStreamInputBlob(), diff --git a/azurerm/resource_arm_stream_analytics_output_mssql.go b/azurerm/resource_arm_stream_analytics_output_mssql.go new file mode 100644 index 000000000000..62a1bd0b152c --- /dev/null +++ b/azurerm/resource_arm_stream_analytics_output_mssql.go @@ -0,0 +1,212 @@ +package azurerm + +import ( + "fmt" + "log" + + "github.com/Azure/azure-sdk-for-go/services/streamanalytics/mgmt/2016-03-01/streamanalytics" + "github.com/hashicorp/terraform/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/response" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmStreamAnalyticsOutputSql() *schema.Resource { + return &schema.Resource{ + Create: resourceArmStreamAnalyticsOutputSqlCreateUpdate, + Read: resourceArmStreamAnalyticsOutputSqlRead, + Update: resourceArmStreamAnalyticsOutputSqlCreateUpdate, + Delete: resourceArmStreamAnalyticsOutputSqlDelete, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "stream_analytics_job_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "resource_group_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "server": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "database": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "table": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "user": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "password": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + ValidateFunc: validate.NoEmptyStrings, + }, + }, + } +} + +func resourceArmStreamAnalyticsOutputSqlCreateUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).streamAnalyticsOutputsClient + ctx := meta.(*ArmClient).StopContext + + log.Printf("[INFO] Preparing arguments for Azure Stream Analytics SQL Output creation.") + name := d.Get("name").(string) + jobName := d.Get("stream_analytics_job_name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + if 
requireResourcesToBeImported && d.IsNewResource() {
+		existing, err := client.Get(ctx, resourceGroup, jobName, name)
+		if err != nil && !utils.ResponseWasNotFound(existing.Response) {
+			return fmt.Errorf("Error checking for existing Azure Stream Analytics SQL Output %q (Job %q / Resource Group %q): %s", name, jobName, resourceGroup, err)
+		}
+
+		if existing.ID != nil && *existing.ID != "" {
+			return tf.ImportAsExistsError("azurerm_stream_analytics_output_mssql", *existing.ID)
+		}
+	}
+
+	server := d.Get("server").(string)
+	databaseName := d.Get("database").(string)
+	tableName := d.Get("table").(string)
+	sqlUser := d.Get("user").(string)
+	sqlUserPassword := d.Get("password").(string)
+
+	props := streamanalytics.Output{
+		Name: utils.String(name),
+		OutputProperties: &streamanalytics.OutputProperties{
+			Datasource: &streamanalytics.AzureSQLDatabaseOutputDataSource{
+				Type: streamanalytics.TypeMicrosoftSQLServerDatabase,
+				AzureSQLDatabaseOutputDataSourceProperties: &streamanalytics.AzureSQLDatabaseOutputDataSourceProperties{
+					Server:   utils.String(server),
+					Database: utils.String(databaseName),
+					User:     utils.String(sqlUser),
+					Password: utils.String(sqlUserPassword),
+					Table:    utils.String(tableName),
+				},
+			},
+		},
+	}
+
+	if d.IsNewResource() {
+		if _, err := client.CreateOrReplace(ctx, props, resourceGroup, jobName, name, "", ""); err != nil {
+			return fmt.Errorf("Error Creating Stream Analytics Output SQL %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+		}
+
+		read, err := client.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			return fmt.Errorf("Error retrieving Stream Analytics Output SQL %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+		}
+		if read.ID == nil {
+			return fmt.Errorf("Cannot read ID of Stream Analytics Output SQL %q (Job %q / Resource Group %q)", name, jobName, resourceGroup)
+		}
+
+		d.SetId(*read.ID)
+	} else {
+		if _, err := client.Update(ctx, props, resourceGroup, jobName, name, ""); err != nil {
+			return fmt.Errorf("Error Updating Stream Analytics Output SQL %q (Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+		}
+	}
+
+	return resourceArmStreamAnalyticsOutputSqlRead(d, meta)
+}
+
+func resourceArmStreamAnalyticsOutputSqlRead(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).streamAnalyticsOutputsClient
+	ctx := meta.(*ArmClient).StopContext
+
+	id, err := parseAzureResourceID(d.Id())
+	if err != nil {
+		return err
+	}
+	resourceGroup := id.ResourceGroup
+	jobName := id.Path["streamingjobs"]
+	name := id.Path["outputs"]
+
+	resp, err := client.Get(ctx, resourceGroup, jobName, name)
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			log.Printf("[DEBUG] Output SQL %q was not found in Stream Analytics Job %q / Resource Group %q - removing from state!", name, jobName, resourceGroup)
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Stream Output SQL %q (Stream Analytics Job %q / Resource Group %q): %+v", name, jobName, resourceGroup, err)
+	}
+
+	d.Set("name", name)
+	d.Set("resource_group_name", resourceGroup)
+	d.Set("stream_analytics_job_name", jobName)
+
+	if props := resp.OutputProperties; props != nil {
+		v, ok := props.Datasource.AsAzureSQLDatabaseOutputDataSource()
+		if !ok {
+			return fmt.Errorf("Error converting Output Data Source to a SQL Output")
+		}
+
+		d.Set("server", v.Server)
+		d.Set("database", v.Database)
+		d.Set("table", v.Table)
+		d.Set("user", v.User)
+		// the password isn't returned by the API, so it can't be set here
+	}
+
+	return nil
+}
+
+func 
resourceArmStreamAnalyticsOutputSqlDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).streamAnalyticsOutputsClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + jobName := id.Path["streamingjobs"] + name := id.Path["outputs"] + + if resp, err := client.Delete(ctx, resourceGroup, jobName, name); err != nil { + if !response.WasNotFound(resp.Response) { + return fmt.Errorf("Error deleting Output SQL %q (Stream Analytics Job %q / Resource Group %q) %+v", name, jobName, resourceGroup, err) + } + } + + return nil +} diff --git a/azurerm/resource_arm_stream_analytics_output_mssql_test.go b/azurerm/resource_arm_stream_analytics_output_mssql_test.go new file mode 100644 index 000000000000..611bf803a0e5 --- /dev/null +++ b/azurerm/resource_arm_stream_analytics_output_mssql_test.go @@ -0,0 +1,269 @@ +package azurerm + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" +) + +func TestAccAzureRMStreamAnalyticsOutputSql_basic(t *testing.T) { + resourceName := "azurerm_stream_analytics_output_mssql.test" + ri := tf.AccRandTimeInt() + rs := acctest.RandString(5) + location := testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsOutputSqlDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMStreamAnalyticsOutputSql_basic(ri, rs, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsOutputSqlExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "password", + }, + }, + }, + }) +} + +func TestAccAzureRMStreamAnalyticsOutputSql_update(t *testing.T) { + resourceName := "azurerm_stream_analytics_output_mssql.test" + ri := tf.AccRandTimeInt() + rs := acctest.RandString(5) + location := testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMStreamAnalyticsOutputSqlDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMStreamAnalyticsOutputSql_basic(ri, rs, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsOutputSqlExists(resourceName), + ), + }, + { + Config: testAccAzureRMStreamAnalyticsOutputSql_updated(ri, rs, location), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMStreamAnalyticsOutputSqlExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{ + // not returned from the API + "password", + }, + }, + }, + }) +} + +func TestAccAzureRMStreamAnalyticsOutputSql_requiresImport(t *testing.T) { + if !requireResourcesToBeImported { + t.Skip("Skipping since resources aren't required to be imported") + return + } + + resourceName := "azurerm_stream_analytics_output_mssql.test" + ri := tf.AccRandTimeInt() + rs := acctest.RandString(5) + location := testLocation() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: 
testCheckAzureRMStreamAnalyticsOutputSqlDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccAzureRMStreamAnalyticsOutputSql_basic(ri, rs, location),
+				Check: resource.ComposeTestCheckFunc(
+					testCheckAzureRMStreamAnalyticsOutputSqlExists(resourceName),
+				),
+			},
+			{
+				Config:      testAccAzureRMStreamAnalyticsOutputSql_requiresImport(ri, rs, location),
+				ExpectError: testRequiresImportError("azurerm_stream_analytics_output_mssql"),
+			},
+		},
+	})
+}
+
+func testCheckAzureRMStreamAnalyticsOutputSqlExists(resourceName string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		// Ensure we have enough information in state to look up in API
+		rs, ok := s.RootModule().Resources[resourceName]
+		if !ok {
+			return fmt.Errorf("Not found: %s", resourceName)
+		}
+
+		name := rs.Primary.Attributes["name"]
+		jobName := rs.Primary.Attributes["stream_analytics_job_name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+
+		conn := testAccProvider.Meta().(*ArmClient).streamAnalyticsOutputsClient
+		ctx := testAccProvider.Meta().(*ArmClient).StopContext
+		resp, err := conn.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			return fmt.Errorf("Bad: Get on streamAnalyticsOutputsClient: %+v", err)
+		}
+
+		if resp.StatusCode == http.StatusNotFound {
+			return fmt.Errorf("Bad: Stream Output %q (Stream Analytics Job %q / Resource Group %q) does not exist", name, jobName, resourceGroup)
+		}
+
+		return nil
+	}
+}
+
+func testCheckAzureRMStreamAnalyticsOutputSqlDestroy(s *terraform.State) error {
+	conn := testAccProvider.Meta().(*ArmClient).streamAnalyticsOutputsClient
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "azurerm_stream_analytics_output_mssql" {
+			continue
+		}
+
+		name := rs.Primary.Attributes["name"]
+		jobName := rs.Primary.Attributes["stream_analytics_job_name"]
+		resourceGroup := rs.Primary.Attributes["resource_group_name"]
+		ctx := testAccProvider.Meta().(*ArmClient).StopContext
+		resp, err := conn.Get(ctx, resourceGroup, jobName, name)
+		if err != nil {
+			return nil
+		}
+
+		if resp.StatusCode != http.StatusNotFound {
+			return fmt.Errorf("Stream Analytics Output SQL still exists:\n%#v", resp.OutputProperties)
+		}
+	}
+
+	return nil
+}
+
+func testAccAzureRMStreamAnalyticsOutputSql_basic(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsOutputSql_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_output_mssql" "test" {
+  name                      = "acctestoutput-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+
+  server   = "${azurerm_sql_server.test.fully_qualified_domain_name}"
+  user     = "${azurerm_sql_server.test.administrator_login}"
+  password = "${azurerm_sql_server.test.administrator_login_password}"
+  database = "${azurerm_sql_database.test.name}"
+  table    = "AccTestTable"
+}
+`, template, rInt)
+}
+
+func testAccAzureRMStreamAnalyticsOutputSql_updated(rInt int, rString string, location string) string {
+	template := testAccAzureRMStreamAnalyticsOutputSql_template(rInt, rString, location)
+	return fmt.Sprintf(`
+%s
+
+resource "azurerm_stream_analytics_output_mssql" "test" {
+  name                      = "acctestoutput-updated-%d"
+  stream_analytics_job_name = "${azurerm_stream_analytics_job.test.name}"
+  resource_group_name       = "${azurerm_stream_analytics_job.test.resource_group_name}"
+
+  server   = "${azurerm_sql_server.test.fully_qualified_domain_name}"
+  user     = 
"${azurerm_sql_server.test.administrator_login}" + password = "${azurerm_sql_server.test.administrator_login_password}" + database = "${azurerm_sql_database.test.name}" + table = "AccTestTable" +} +`, template, rInt) +} + +func testAccAzureRMStreamAnalyticsOutputSql_requiresImport(rInt int, rString string, location string) string { + template := testAccAzureRMStreamAnalyticsOutputSql_basic(rInt, rString, location) + return fmt.Sprintf(` +%s + +resource "azurerm_stream_analytics_output_mssql" "import" { + name = "${azurerm_stream_analytics_output_mssql.test.name}" + stream_analytics_job_name = "${azurerm_stream_analytics_output_mssql.test.stream_analytics_job_name}" + resource_group_name = "${azurerm_stream_analytics_output_mssql.test.resource_group_name}" + + server = "${azurerm_sql_server.test.fully_qualified_domain_name}" + user = "${azurerm_sql_server.test.administrator_login}" + password = "${azurerm_sql_server.test.administrator_login_password}" + database = "${azurerm_sql_database.test.name}" + table = "AccTestTable" + +} +`, template) +} + +func testAccAzureRMStreamAnalyticsOutputSql_template(rInt int, rString string, location string) string { + return fmt.Sprintf(` + +resource "azurerm_resource_group" "test" { + name = "acctestRG-%d" + location = "%s" +} + +resource "azurerm_sql_server" "test" { + name = "acctestserver-%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + version = "12.0" + administrator_login = "acctestadmin" + administrator_login_password = "t2RX8A76GrnE4EKC" +} + +resource "azurerm_sql_database" "test" { + name = "acctestdb" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + server_name = "${azurerm_sql_server.test.name}" + requested_service_objective_name = "S0" + collation = "SQL_LATIN1_GENERAL_CP1_CI_AS" + max_size_bytes = "268435456000" + create_mode = "Default" +} + +resource "azurerm_stream_analytics_job" "test" { + name = "acctestjob-%s" + resource_group_name = "${azurerm_resource_group.test.name}" + location = "${azurerm_resource_group.test.location}" + compatibility_level = "1.0" + data_locale = "en-GB" + events_late_arrival_max_delay_in_seconds = 60 + events_out_of_order_max_delay_in_seconds = 50 + events_out_of_order_policy = "Adjust" + output_error_policy = "Drop" + streaming_units = 3 + + transformation_query = <azurerm_stream_analytics_output_blob +
[The rest of the test template (the `transformation_query` heredoc body and closing braces) and the website/azurerm.erb sidebar diff are truncated in this excerpt; the sidebar change adds an `azurerm_stream_analytics_output_mssql` link between the existing `azurerm_stream_analytics_output_blob` and `azurerm_stream_analytics_output_eventhub` entries.]
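The body of that heredoc is not recoverable from this excerpt. As a point of reference only, the Stream Analytics test templates elsewhere in this provider use a minimal pass-through query of roughly the following shape; the aliases below are placeholders, not values taken from this PR:

```hcl
# Sketch only - the actual query body used by this PR is truncated above.
# A simple pass-through query keeps the acceptance-test job valid without real data.
transformation_query = <<QUERY
    SELECT *
    INTO [YourOutputAlias]
    FROM [YourInputAlias]
QUERY
```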
diff --git a/website/docs/r/stream_analytics_output_mssql.html.markdown b/website/docs/r/stream_analytics_output_mssql.html.markdown
new file mode 100644
index 000000000000..143556a07a43
--- /dev/null
+++ b/website/docs/r/stream_analytics_output_mssql.html.markdown
@@ -0,0 +1,88 @@
+---
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_stream_analytics_output_mssql"
+sidebar_current: "docs-azurerm-resource-stream-analytics-output-mssql"
+description: |-
+  Manages a Stream Analytics Output to a Microsoft SQL Server Database.
+---
+
+# azurerm_stream_analytics_output_mssql
+
+Manages a Stream Analytics Output to a Microsoft SQL Server Database.
+
+## Example Usage
+
+```hcl
+data "azurerm_resource_group" "example" {
+  name = "example-resources"
+}
+
+data "azurerm_stream_analytics_job" "example" {
+  name                = "example-job"
+  resource_group_name = "${data.azurerm_resource_group.example.name}"
+}
+
+resource "azurerm_sql_server" "example" {
+  name                         = "example-server"
+  resource_group_name          = "${data.azurerm_resource_group.example.name}"
+  location                     = "${data.azurerm_resource_group.example.location}"
+  version                      = "12.0"
+  administrator_login          = "dbadmin"
+  administrator_login_password = "example-password"
+}
+
+resource "azurerm_sql_database" "example" {
+  name                             = "exampledb"
+  resource_group_name              = "${data.azurerm_resource_group.example.name}"
+  location                         = "${data.azurerm_resource_group.example.location}"
+  server_name                      = "${azurerm_sql_server.example.name}"
+  requested_service_objective_name = "S0"
+  collation                        = "SQL_LATIN1_GENERAL_CP1_CI_AS"
+  max_size_bytes                   = "268435456000"
+  create_mode                      = "Default"
+}
+
+resource "azurerm_stream_analytics_output_mssql" "example" {
+  name                      = "example-output-sql"
+  stream_analytics_job_name = "${data.azurerm_stream_analytics_job.example.name}"
+  resource_group_name       = "${data.azurerm_stream_analytics_job.example.resource_group_name}"
+
+  server   = "${azurerm_sql_server.example.fully_qualified_domain_name}"
+  user     = "${azurerm_sql_server.example.administrator_login}"
+  password = "${azurerm_sql_server.example.administrator_login_password}"
+  database = "${azurerm_sql_database.example.name}"
+  table    = "ExampleTable"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The name of the Stream Output. Changing this forces a new resource to be created.
+
+* `resource_group_name` - (Required) The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
+
+* `stream_analytics_job_name` - (Required) The name of the Stream Analytics Job. Changing this forces a new resource to be created.
+
+* `server` - (Required) The URL of the SQL server. Changing this forces a new resource to be created.
+
+* `database` - (Required) The name of the database in the SQL server that the output points to. Changing this forces a new resource to be created.
+
+* `user` - (Required) The username used to log in to the Microsoft SQL Server. Changing this forces a new resource to be created.
+
+* `password` - (Required) The password used together with `user` to log in to the Microsoft SQL Server.
+
+* `table` - (Required) The table in the database that the output points to. Changing this forces a new resource to be created.
+
+## Attributes Reference
+
+The following attributes are exported in addition to the arguments listed above:
+
+* `id` - The ID of the Stream Analytics Output Microsoft SQL Server Database.
+
+## Import
+
+Stream Analytics Outputs to a Microsoft SQL Server Database can be imported using the `resource id`, e.g.
+
+```shell
+terraform import azurerm_stream_analytics_output_mssql.test /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/outputs/output1
+```
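As a convenience for reviewers, here is a sketch of how the new acceptance tests could be exercised locally. It assumes the provider's usual `TF_ACC`-gated `go test` flow and that the `ARM_*` service-principal environment variables are already exported; the exact invocation (or `make` target) the maintainers use may differ.

```shell
# Sketch only: runs the new Stream Analytics SQL output acceptance tests.
# These tests create real Azure resources (a SQL server, a database and a
# Stream Analytics job) and will incur cost.
export TF_ACC=1
go test ./azurerm -v -timeout 120m -run 'TestAccAzureRMStreamAnalyticsOutputSql_'
```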