Skip to content

Commit

Permalink
d/hdinsight_cluster: fixing an issue where the tier and kind were…
Browse files Browse the repository at this point in the history
… lower-cased due to an issue in the API Spec

The API Specification defines `tier` as a Constant which can be re-cased - however the `kind` is defined as a string, thus
can't be. I've opened hashicorp/pandora#3404 (to work around it) and Azure/azure-rest-api-specs#26838
(to fix it) - once those are threaded through, both of these should be normalized.
  • Loading branch information
tombuildsstuff committed Nov 30, 2023
1 parent 31cf747 commit 0255d5b
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 18 deletions.
7 changes: 1 addition & 6 deletions internal/services/hdinsight/hdinsight_cluster_data_source.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ package hdinsight

import (
"fmt"
"strings"
"time"

"github.com/hashicorp/go-azure-helpers/lang/pointer"
Expand Down Expand Up @@ -157,11 +156,7 @@ func dataSourceHDInsightClusterRead(d *pluginsdk.ResourceData, meta interface{})
d.Set("tls_min_version", props.MinSupportedTlsVersion)

d.Set("component_versions", flattenHDInsightsDataSourceComponentVersions(props.ClusterDefinition.ComponentVersion))
kind := ""
if props.ClusterDefinition.Kind != nil {
kind = strings.ToLower(*props.ClusterDefinition.Kind) // TODO: investigate OOB why this is ToLowered, missing Constants?
}
d.Set("kind", kind)
d.Set("kind", pointer.From(props.ClusterDefinition.Kind))
if err := d.Set("gateway", FlattenHDInsightsConfigurations(configuration, d)); err != nil {
return fmt.Errorf("flattening `gateway`: %+v", err)
}
Expand Down
24 changes: 12 additions & 12 deletions internal/services/hdinsight/hdinsight_cluster_data_source_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ func TestAccDataSourceHDInsightCluster_hadoop(t *testing.T) {
{
Config: r.hadoop(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("hadoop"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("HADOOP"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand All @@ -37,8 +37,8 @@ func TestAccDataSourceHDInsightCluster_hbase(t *testing.T) {
{
Config: r.hbase(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("hbase"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("HBASE"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand All @@ -54,8 +54,8 @@ func TestAccDataSourceHDInsightCluster_interactiveQuery(t *testing.T) {
{
Config: r.interactiveQuery(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("interactivehive"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("INTERACTIVEHIVE"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand All @@ -71,8 +71,8 @@ func TestAccDataSourceHDInsightCluster_kafka(t *testing.T) {
{
Config: r.kafka(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("kafka"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("KAFKA"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand All @@ -88,8 +88,8 @@ func TestAccDataSourceHDInsightCluster_kafkaWithRestProxy(t *testing.T) {
{
Config: r.kafkaWithRestProxy(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("kafka"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("KAFKA"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand All @@ -106,8 +106,8 @@ func TestAccDataSourceHDInsightCluster_spark(t *testing.T) {
{
Config: r.spark(data),
Check: acceptance.ComposeTestCheckFunc(
check.That(data.ResourceName).Key("kind").HasValue("spark"),
check.That(data.ResourceName).Key("tier").HasValue("standard"),
check.That(data.ResourceName).Key("kind").HasValue("SPARK"),
check.That(data.ResourceName).Key("tier").HasValue("Standard"),
check.That(data.ResourceName).Key("edge_ssh_endpoint").HasValue(""),
check.That(data.ResourceName).Key("https_endpoint").Exists(),
check.That(data.ResourceName).Key("ssh_endpoint").Exists(),
Expand Down

0 comments on commit 0255d5b

Please sign in to comment.