Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

New Data Source: aws_msk_cluster #8743

Merged
merged 2 commits into from
May 23, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
120 changes: 120 additions & 0 deletions aws/data_source_aws_msk_cluster.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
package aws

import (
"fmt"

"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/kafka"
"github.com/hashicorp/terraform/helper/schema"
"github.com/hashicorp/terraform/helper/validation"
)

// dataSourceAwsMskCluster returns the schema and read operation for the
// aws_msk_cluster data source. The cluster is located by its exact name;
// all remaining attributes are computed from the matched cluster.
func dataSourceAwsMskCluster() *schema.Resource {
	return &schema.Resource{
		Read: dataSourceAwsMskClusterRead,

		Schema: map[string]*schema.Schema{
			// Amazon Resource Name of the matched cluster.
			"arn": {
				Type:     schema.TypeString,
				Computed: true,
			},
			// Comma-separated hostname:port pairs for the Kafka brokers.
			"bootstrap_brokers": {
				Type:     schema.TypeString,
				Computed: true,
			},
			// The only argument: the cluster name to look up (MSK limits
			// names to 64 characters).
			"cluster_name": {
				Type:         schema.TypeString,
				Required:     true,
				ValidateFunc: validation.StringLenBetween(1, 64),
			},
			// Apache Kafka version running on the brokers.
			"kafka_version": {
				Type:     schema.TypeString,
				Computed: true,
			},
			// Broker count across the cluster.
			"number_of_broker_nodes": {
				Type:     schema.TypeInt,
				Computed: true,
			},
			// Tags assigned to the cluster (computed map).
			"tags": tagsSchemaComputed(),
			// Comma-separated IP:port pairs for the ZooKeeper ensemble.
			"zookeeper_connect_string": {
				Type:     schema.TypeString,
				Computed: true,
			},
		},
	}
}

// dataSourceAwsMskClusterRead resolves the "cluster_name" argument to exactly
// one MSK cluster and populates the data source attributes from it.
//
// ListClusters filters by name prefix and paginates, so all pages are
// collected first; zero or multiple matches are reported as errors so the
// user can tighten the search criteria.
func dataSourceAwsMskClusterRead(d *schema.ResourceData, meta interface{}) error {
	conn := meta.(*AWSClient).kafkaconn

	listClustersInput := &kafka.ListClustersInput{
		ClusterNameFilter: aws.String(d.Get("cluster_name").(string)),
	}

	var clusters []*kafka.ClusterInfo
	for {
		listClustersOutput, err := conn.ListClusters(listClustersInput)

		if err != nil {
			return fmt.Errorf("error listing MSK Clusters: %s", err)
		}

		if listClustersOutput == nil {
			break
		}

		clusters = append(clusters, listClustersOutput.ClusterInfoList...)

		// An empty NextToken marks the final page of results.
		if aws.StringValue(listClustersOutput.NextToken) == "" {
			break
		}

		listClustersInput.NextToken = listClustersOutput.NextToken
	}

	if len(clusters) == 0 {
		return fmt.Errorf("error reading MSK Cluster: no results found")
	}

	if len(clusters) > 1 {
		return fmt.Errorf("error reading MSK Cluster: multiple results found, try adjusting search criteria")
	}

	cluster := clusters[0]

	bootstrapBrokersInput := &kafka.GetBootstrapBrokersInput{
		ClusterArn: cluster.ClusterArn,
	}

	// Renamed from the previous misspelling "bootstrapBrokersoOutput".
	bootstrapBrokersOutput, err := conn.GetBootstrapBrokers(bootstrapBrokersInput)

	if err != nil {
		return fmt.Errorf("error reading MSK Cluster (%s) bootstrap brokers: %s", aws.StringValue(cluster.ClusterArn), err)
	}

	listTagsInput := &kafka.ListTagsForResourceInput{
		ResourceArn: cluster.ClusterArn,
	}

	listTagsOutput, err := conn.ListTagsForResource(listTagsInput)

	if err != nil {
		return fmt.Errorf("error reading MSK Cluster (%s) tags: %s", aws.StringValue(cluster.ClusterArn), err)
	}

	d.Set("arn", aws.StringValue(cluster.ClusterArn))
	d.Set("bootstrap_brokers", aws.StringValue(bootstrapBrokersOutput.BootstrapBrokerString))
	d.Set("cluster_name", aws.StringValue(cluster.ClusterName))

	// Guard against a nil CurrentBrokerSoftwareInfo to avoid a panic; the
	// field is a pointer in the SDK response and is not guaranteed set.
	if cluster.CurrentBrokerSoftwareInfo != nil {
		d.Set("kafka_version", aws.StringValue(cluster.CurrentBrokerSoftwareInfo.KafkaVersion))
	}

	d.Set("number_of_broker_nodes", aws.Int64Value(cluster.NumberOfBrokerNodes))

	if err := d.Set("tags", tagsToMapMskCluster(listTagsOutput.Tags)); err != nil {
		return fmt.Errorf("error setting tags: %s", err)
	}

	d.Set("zookeeper_connect_string", aws.StringValue(cluster.ZookeeperConnectString))

	d.SetId(aws.StringValue(cluster.ClusterArn))

	return nil
}
60 changes: 60 additions & 0 deletions aws/data_source_aws_msk_cluster_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
package aws

import (
	"fmt"
	"testing"

	"github.com/hashicorp/terraform/helper/acctest"
	"github.com/hashicorp/terraform/helper/resource"
)

// TestAccAWSMskClusterDataSource_Name verifies that the aws_msk_cluster data
// source, looked up by name, reports the same attribute values as the
// managed resource it targets.
func TestAccAWSMskClusterDataSource_Name(t *testing.T) {
	rName := acctest.RandomWithPrefix("tf-acc-test")
	dataSourceName := "data.aws_msk_cluster.test"
	resourceName := "aws_msk_cluster.test"

	// Each pair check asserts the data source attribute mirrors the
	// resource; bootstrap_brokers is only checked for presence because its
	// value is assigned by the service.
	checks := resource.ComposeTestCheckFunc(
		resource.TestCheckResourceAttrPair(resourceName, "arn", dataSourceName, "arn"),
		resource.TestCheckResourceAttrSet(dataSourceName, "bootstrap_brokers"),
		resource.TestCheckResourceAttrPair(resourceName, "cluster_name", dataSourceName, "cluster_name"),
		resource.TestCheckResourceAttrPair(resourceName, "kafka_version", dataSourceName, "kafka_version"),
		resource.TestCheckResourceAttrPair(resourceName, "number_of_broker_nodes", dataSourceName, "number_of_broker_nodes"),
		resource.TestCheckResourceAttrPair(resourceName, "tags.%", dataSourceName, "tags.%"),
		resource.TestCheckResourceAttrPair(resourceName, "zookeeper_connect_string", dataSourceName, "zookeeper_connect_string"),
	)

	resource.ParallelTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckMskClusterDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccMskClusterDataSourceConfigName(rName),
				Check:  checks,
			},
		},
	})
}

// testAccMskClusterDataSourceConfigName renders the acceptance-test
// configuration: an aws_msk_cluster resource named rName plus a data source
// that looks it up by cluster_name, appended to the shared base config.
func testAccMskClusterDataSourceConfigName(rName string) string {
	clusterConfig := fmt.Sprintf(`
resource "aws_msk_cluster" "test" {
  cluster_name           = %[1]q
  kafka_version          = "2.1.0"
  number_of_broker_nodes = 3
  broker_node_group_info {
    client_subnets  = ["${aws_subnet.example_subnet_az1.id}", "${aws_subnet.example_subnet_az2.id}", "${aws_subnet.example_subnet_az3.id}"]
    ebs_volume_size = 10
    instance_type   = "kafka.m5.large"
    security_groups = ["${aws_security_group.example_sg.id}"]
  }
  tags = {
    foo = "bar"
  }
}
data "aws_msk_cluster" "test" {
  cluster_name = "${aws_msk_cluster.test.cluster_name}"
}
`, rName)

	return testAccMskClusterBaseConfig() + clusterConfig
}
1 change: 1 addition & 0 deletions aws/provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ func Provider() terraform.ResourceProvider {
"aws_launch_configuration": dataSourceAwsLaunchConfiguration(),
"aws_launch_template": dataSourceAwsLaunchTemplate(),
"aws_mq_broker": dataSourceAwsMqBroker(),
"aws_msk_cluster": dataSourceAwsMskCluster(),
"aws_nat_gateway": dataSourceAwsNatGateway(),
"aws_network_acls": dataSourceAwsNetworkAcls(),
"aws_network_interface": dataSourceAwsNetworkInterface(),
Expand Down
3 changes: 3 additions & 0 deletions website/aws.erb
Original file line number Diff line number Diff line change
Expand Up @@ -312,6 +312,9 @@
<li>
<a href="/docs/providers/aws/d/mq_broker.html">aws_mq_broker</a>
</li>
<li>
<a href="/docs/providers/aws/d/msk_cluster.html">aws_msk_cluster</a>
</li>
<li>
<a href="/docs/providers/aws/d/nat_gateway.html">aws_nat_gateway</a>
</li>
Expand Down
36 changes: 36 additions & 0 deletions website/docs/d/msk_cluster.html.markdown
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
---
layout: "aws"
page_title: "AWS: aws_msk_cluster"
sidebar_current: "docs-aws-datasource-msk-cluster"
description: |-
Get information on an Amazon MSK Cluster
---

# Data Source: aws_msk_cluster

Get information on an Amazon MSK Cluster.

## Example Usage

```hcl
data "aws_msk_cluster" "example" {
cluster_name = "example"
}
```

## Argument Reference

The following arguments are supported:

* `cluster_name` - (Required) Name of the cluster.
bflad marked this conversation as resolved.
Show resolved Hide resolved

## Attribute Reference

In addition to all arguments above, the following attributes are exported:

* `arn` - Amazon Resource Name (ARN) of the MSK cluster.
* `bootstrap_brokers` - A comma separated list of one or more hostname:port pairs of Kafka brokers suitable to bootstrap connectivity to the Kafka cluster.
* `kafka_version` - Apache Kafka version.
* `number_of_broker_nodes` - Number of broker nodes in the cluster.
* `tags` - Map of key-value pairs assigned to the cluster.
* `zookeeper_connect_string` - A comma separated list of one or more IP:port pairs to use to connect to the Apache Zookeeper cluster.