Fix import of index analysis objects (#225) #227
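Fixes #225.

On import, OpenSearch returns the analysis configuration only as flattened `index.analysis.*` settings, so the `analysis_analyzer`, `analysis_tokenizer`, `analysis_filter`, `analysis_char_filter`, and `analysis_normalizer` attributes were left empty in state and the first plan after an import showed spurious diffs on those fields. This change rebuilds the nested structure from the flattened keys during read and stores each category back into state as a JSON string. For example, the flattened setting

    index.analysis.analyzer.custom_analyzer.tokenizer = "standard"

is folded back into

    analysis_analyzer = {"custom_analyzer":{"tokenizer":"standard"}}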

Draft · wants to merge 1 commit into main
60 changes: 60 additions & 0 deletions provider/resource_opensearch_index.go
@@ -802,6 +802,53 @@

	indexResourceDataFromSettings(settings, d)

	// Reconstruct analysis fields from flattened keys.
	analysisData := map[string]map[string]interface{}{
		"analyzer":    {},
		"tokenizer":   {},
		"filter":      {},
		"char_filter": {},
		"normalizer":  {},
	}

	for key, value := range settings {
		if strings.HasPrefix(key, "index.analysis.") {
			parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
			if len(parts) < 2 {
				continue
			}

			category := parts[0] // one of analyzer, tokenizer, filter, char_filter, normalizer
			if _, ok := analysisData[category]; !ok {
				continue
			}

			subkeys := parts[1:]
			insertIntoNestedMap(analysisData[category], subkeys, value)
		}
	}

	if len(analysisData["analyzer"]) > 0 {
		analyzerJSON, _ := json.Marshal(analysisData["analyzer"])
		if err := d.Set("analysis_analyzer", string(analyzerJSON)); err != nil {
			return err
		}
	}
	if len(analysisData["tokenizer"]) > 0 {
		tokenizerJSON, _ := json.Marshal(analysisData["tokenizer"])
		if err := d.Set("analysis_tokenizer", string(tokenizerJSON)); err != nil {
			return err
		}
	}
	if len(analysisData["filter"]) > 0 {
		filterJSON, _ := json.Marshal(analysisData["filter"])
		if err := d.Set("analysis_filter", string(filterJSON)); err != nil {
			return err
		}
	}
	if len(analysisData["char_filter"]) > 0 {
		charFilterJSON, _ := json.Marshal(analysisData["char_filter"])
		if err := d.Set("analysis_char_filter", string(charFilterJSON)); err != nil {
			return err
		}
	}
	if len(analysisData["normalizer"]) > 0 {
		normalizerJSON, _ := json.Marshal(analysisData["normalizer"])
		if err := d.Set("analysis_normalizer", string(normalizerJSON)); err != nil {
			return err
		}
	}

	var response *json.RawMessage
	var res *elastic7.Response
	var mappingsResponse map[string]interface{}
@@ -846,6 +893,19 @@
	return nil
}

// insertIntoNestedMap rebuilds nested analysis configuration (analyzers, tokenizers,
// filters, char_filters, normalizers) from the flattened `index.analysis.*` keys
// returned by OpenSearch on import.
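// For example, the flattened setting
//
//	index.analysis.analyzer.custom_analyzer.tokenizer = "standard"
//
// is split into the category "analyzer" and the subkeys ["custom_analyzer", "tokenizer"],
// and ends up as {"custom_analyzer": {"tokenizer": "standard"}} under that category.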
func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
	if len(keys) == 1 {
		m[keys[0]] = value
		return
	}
	if _, ok := m[keys[0]].(map[string]interface{}); !ok {
		m[keys[0]] = map[string]interface{}{}
	}
	insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
}

func updateAliases(index string, oldAliases, newAliases map[string]interface{}, meta interface{}) error {
	ctx := context.Background()

87 changes: 87 additions & 0 deletions provider/resource_opensearch_index_test.go
@@ -262,6 +262,52 @@ resource "opensearch_index" "test" {

  depends_on = [opensearch_index_template.test]
}
`
	testAccOpensearchIndexImportAnalysis = `
resource "opensearch_index" "test_import_analysis" {
  name               = "terraform-test-import-analysis"
  number_of_shards   = 1
  number_of_replicas = 1

  analysis_analyzer = jsonencode({
    custom_analyzer = {
      type      = "custom"
      tokenizer = "standard"
      filter    = ["lowercase", "asciifolding"]
    }
  })

  analysis_filter = jsonencode({
    my_shingle_filter = {
      type             = "shingle"
      max_shingle_size = 2
      min_shingle_size = 2
      output_unigrams  = false
    }
  })

  analysis_tokenizer = jsonencode({
    my_ngram_tokenizer = {
      type     = "ngram"
      min_gram = "3"
      max_gram = "4"
    }
  })

  analysis_char_filter = jsonencode({
    my_char_filter_apostrophe = {
      type     = "mapping"
      mappings = ["'=>"]
    }
  })

  analysis_normalizer = jsonencode({
    my_normalizer = {
      type   = "custom"
      filter = ["lowercase", "asciifolding"]
    }
  })
}
`
)

@@ -788,3 +834,44 @@ func checkOpensearchAliasDeleted(indexName, aliasName string) resource.TestCheck
		return nil
	}
}

func TestAccOpensearchIndex_importAnalysis(t *testing.T) {
	resourceName := "opensearch_index.test_import_analysis"
	indexName := "terraform-test-import-analysis"

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: checkOpensearchIndexDestroy,
		Steps: []resource.TestStep{
			// Step 1: Create the index with analysis fields.
			{
				Config: testAccOpensearchIndexImportAnalysis,
				Check: resource.ComposeTestCheckFunc(
					checkOpensearchIndexExists(resourceName),
				),
			},
			// Step 2: Import the index.
			{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateId:     indexName,
				ImportStateVerify: true,
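				// force_destroy exists only in Terraform state and is not
				// returned by the OpenSearch settings API, so it cannot be
				// verified after an import.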
				ImportStateVerifyIgnore: []string{
					"force_destroy",
				},
			},
			// Step 3: Re-run the same config and ensure no diffs appear.
			{
				Config: testAccOpensearchIndexImportAnalysis,
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(resourceName, "analysis_analyzer", `{"custom_analyzer":{"filter":["lowercase","asciifolding"],"tokenizer":"standard","type":"custom"}}`),
					resource.TestCheckResourceAttr(resourceName, "analysis_filter", `{"my_shingle_filter":{"max_shingle_size":"2","min_shingle_size":"2","output_unigrams":false,"type":"shingle"}}`),
					resource.TestCheckResourceAttr(resourceName, "analysis_tokenizer", `{"my_ngram_tokenizer":{"max_gram":"4","min_gram":"3","type":"ngram"}}`),
					resource.TestCheckResourceAttr(resourceName, "analysis_char_filter", `{"my_char_filter_apostrophe":{"mappings":["'=>"],"type":"mapping"}}`),
					resource.TestCheckResourceAttr(resourceName, "analysis_normalizer", `{"my_normalizer":{"filter":["lowercase","asciifolding"],"type":"custom"}}`),
				),
			},
		},
	})
}
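Assuming the provider's standard terraform-plugin-sdk acceptance setup and a reachable OpenSearch endpoint, the new test can be run with:

    TF_ACC=1 go test ./provider -run TestAccOpensearchIndex_importAnalysis -v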