Fix import of index analysis objects (opensearch-project#225)

Previously, when importing an existing index, the analysis configuration
(analyzers, tokenizers, filters, char_filters, and normalizers) was not
fully populated into the Terraform state. As a result, users would not see
these analysis settings after import, leading to missing or incomplete
configurations in state.

This commit introduces logic to reconstruct nested analysis objects from
the flattened `index.analysis.*` keys returned by OpenSearch on import. By
converting these flattened keys back into a nested JSON structure, the
imported index state now includes the analysis settings as users typically
define them in their Terraform configuration.
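For illustration, here is a minimal, self-contained sketch of the reconstruction idea (not the provider code itself; the key names and values are made up):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	// Flattened form, as returned by the index settings API.
	flat := map[string]interface{}{
		"index.analysis.analyzer.my_analyzer.type":      "custom",
		"index.analysis.analyzer.my_analyzer.tokenizer": "standard",
		"index.analysis.filter.my_stop.type":            "stop",
	}

	// Fold each dotted key back into a nested map.
	nested := map[string]interface{}{}
	for key, value := range flat {
		parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
		m := nested
		for _, p := range parts[:len(parts)-1] {
			child, ok := m[p].(map[string]interface{})
			if !ok {
				child = map[string]interface{}{}
				m[p] = child
			}
			m = child
		}
		m[parts[len(parts)-1]] = value
	}

	out, _ := json.MarshalIndent(nested, "", "  ")
	fmt.Println(string(out)) // nested analyzer/filter structure
}
```

The provider applies the same folding per category (`analyzer`, `tokenizer`, `filter`, `char_filter`, `normalizer`) and stores each non-empty result as a JSON string in state.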

**Note**: This change may reveal differences for existing configurations if
they rely on unquoted numeric values or trailing whitespace in
analysis-related JSON. Such configurations may now produce diffs where they
did not before, potentially resulting in forced replacements.
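A hedged sketch of the first case, assuming (as with flattened settings generally) that imported values come back as strings; the `my_ngram` filter name is made up:

```go
package main

import "fmt"

func main() {
	// What a user may have written (unquoted number)...
	configured := `{"my_ngram":{"min_gram":1,"type":"ngram"}}`
	// ...versus what the reconstructed state holds after import,
	// since flattened settings values arrive as strings.
	imported := `{"my_ngram":{"min_gram":"1","type":"ngram"}}`
	fmt.Println(configured == imported) // false: Terraform sees a diff
}
```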

Issues Resolved
opensearch-project#225
Gonzalo Arce committed Dec 11, 2024
1 parent 790834e commit 866d94c
Showing 1 changed file with 60 additions and 1 deletion.
61 changes: 60 additions & 1 deletion provider/resource_opensearch_index.go
@@ -802,6 +802,53 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error

	indexResourceDataFromSettings(settings, d)

	// Reconstruct nested analysis objects from the flattened index.analysis.* keys.
	analysisData := map[string]map[string]interface{}{
		"analyzer":    {},
		"tokenizer":   {},
		"filter":      {},
		"char_filter": {},
		"normalizer":  {},
	}

	for key, value := range settings {
		if strings.HasPrefix(key, "index.analysis.") {
			parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
			if len(parts) < 2 {
				continue
			}

			category := parts[0] // one of: analyzer, tokenizer, filter, char_filter, normalizer
			if _, ok := analysisData[category]; !ok {
				continue
			}

			subkeys := parts[1:]
			insertIntoNestedMap(analysisData[category], subkeys, value)
		}
	}

if len(analysisData["analyzer"]) > 0 {
analyzerJSON, _ := json.Marshal(analysisData["analyzer"])
d.Set("analysis_analyzer", string(analyzerJSON))
}
if len(analysisData["tokenizer"]) > 0 {
tokenizerJSON, _ := json.Marshal(analysisData["tokenizer"])
d.Set("analysis_tokenizer", string(tokenizerJSON))
}
if len(analysisData["filter"]) > 0 {
filterJSON, _ := json.Marshal(analysisData["filter"])
d.Set("analysis_filter", string(filterJSON))
}
if len(analysisData["char_filter"]) > 0 {
charFilterJSON, _ := json.Marshal(analysisData["char_filter"])
d.Set("analysis_char_filter", string(charFilterJSON))
}
if len(analysisData["normalizer"]) > 0 {
normalizerJSON, _ := json.Marshal(analysisData["normalizer"])
d.Set("analysis_normalizer", string(normalizerJSON))
}

	var response *json.RawMessage
	var res *elastic7.Response
	var mappingsResponse map[string]interface{}
@@ -838,14 +885,26 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
	}

	err = d.Set("mappings", string(jsonString))
	if err != nil {
		return err
	}

	return nil
}

// insertIntoNestedMap rebuilds nested analysis configuration (analyzers,
// tokenizers, filters, char_filters, normalizers) from the flattened
// index.analysis.* keys returned by OpenSearch on import.
func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
	if len(keys) == 1 {
		m[keys[0]] = value
		return
	}
	if _, ok := m[keys[0]].(map[string]interface{}); !ok {
		m[keys[0]] = map[string]interface{}{}
	}
	insertIntoNestedMap(m[keys[0]].(map[string]interface{}), keys[1:], value)
}
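// Illustrative usage (not part of this commit): the caller strips the
// category before calling, so folding the flattened key
// "analyzer.my_analyzer.type" = "custom" into an empty category map yields
// the nested form {"my_analyzer": {"type": "custom"}}:
//
//	m := map[string]interface{}{}
//	insertIntoNestedMap(m, []string{"my_analyzer", "type"}, "custom")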

func updateAliases(index string, oldAliases, newAliases map[string]interface{}, meta interface{}) error {
	ctx := context.Background()

