Skip to content

Commit 88c7a2f

Browse files
author
Gonzalo Arce
committed
Fix import of index analysis objects (#225)
Previously, when importing an existing index, the analysis configuration (analyzers, tokenizers, filters, char_filters, and normalizers) was not fully populated into the Terraform state. As a result, users would not see these analysis settings after import, leading to missing or incomplete configurations in state. This commit introduces logic to reconstruct nested analysis objects from the flattened `index.analysis.*` keys returned by OpenSearch on import. By converting these flattened keys back into a nested JSON structure, the imported index state now includes the analysis settings as users typically define them in their Terraform configuration. **Note**: This change may reveal differences for existing configurations if they rely on unquoted numeric values or trailing whitespace in analysis-related JSON. Such configurations may now produce diffs where they did not before, potentially resulting in forced replacements. Signed-off-by: Gonzalo Arce <[email protected]>
1 parent 790834e commit 88c7a2f

File tree

1 file changed

+60
-0
lines changed

1 file changed

+60
-0
lines changed

provider/resource_opensearch_index.go

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -802,6 +802,53 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
802802

803803
indexResourceDataFromSettings(settings, d)
804804

805+
// Reconstruct analysis fields from flattened keys
806+
analysisData := map[string]map[string]interface{}{
807+
"analyzer": {},
808+
"tokenizer": {},
809+
"filter": {},
810+
"char_filter": {},
811+
"normalizer": {},
812+
}
813+
814+
for key, value := range settings {
815+
if strings.HasPrefix(key, "index.analysis.") {
816+
parts := strings.Split(strings.TrimPrefix(key, "index.analysis."), ".")
817+
if len(parts) < 2 {
818+
continue
819+
}
820+
821+
category := parts[0] // should be one of analyzer, tokenizer, filter, char_filter, normalizer
822+
if _, ok := analysisData[category]; !ok {
823+
continue
824+
}
825+
826+
subkeys := parts[1:]
827+
insertIntoNestedMap(analysisData[category], subkeys, value)
828+
}
829+
}
830+
831+
if len(analysisData["analyzer"]) > 0 {
832+
analyzerJSON, _ := json.Marshal(analysisData["analyzer"])
833+
d.Set("analysis_analyzer", string(analyzerJSON))
834+
}
835+
if len(analysisData["tokenizer"]) > 0 {
836+
tokenizerJSON, _ := json.Marshal(analysisData["tokenizer"])
837+
d.Set("analysis_tokenizer", string(tokenizerJSON))
838+
}
839+
if len(analysisData["filter"]) > 0 {
840+
filterJSON, _ := json.Marshal(analysisData["filter"])
841+
d.Set("analysis_filter", string(filterJSON))
842+
}
843+
if len(analysisData["char_filter"]) > 0 {
844+
charFilterJSON, _ := json.Marshal(analysisData["char_filter"])
845+
d.Set("analysis_char_filter", string(charFilterJSON))
846+
}
847+
if len(analysisData["normalizer"]) > 0 {
848+
normalizerJSON, _ := json.Marshal(analysisData["normalizer"])
849+
d.Set("analysis_normalizer", string(normalizerJSON))
850+
}
851+
805852
var response *json.RawMessage
806853
var res *elastic7.Response
807854
var mappingsResponse map[string]interface{}
@@ -846,6 +893,19 @@ func resourceOpensearchIndexRead(d *schema.ResourceData, meta interface{}) error
846893
return nil
847894
}
848895

896+
// insertIntoNestedMap rebuilds nested analysis configuration (analyzers,
// tokenizers, filters, char_filters, normalizers) from the flattened
// `index.analysis.*` keys returned by OpenSearch on import.
//
// Keys are consumed left to right: every key except the last becomes a
// nested map[string]interface{} level (replacing any non-map value already
// stored there), and the final key is assigned value. An empty keys slice
// is a no-op rather than an index-out-of-range panic.
func insertIntoNestedMap(m map[string]interface{}, keys []string, value interface{}) {
	// Guard: nothing to insert; the original code would panic on keys[0].
	if len(keys) == 0 {
		return
	}
	if len(keys) == 1 {
		m[keys[0]] = value
		return
	}
	// Descend one level, creating (or overwriting a non-map leaf with) an
	// intermediate map. Single checked assertion instead of check + re-assert.
	child, ok := m[keys[0]].(map[string]interface{})
	if !ok {
		child = map[string]interface{}{}
		m[keys[0]] = child
	}
	insertIntoNestedMap(child, keys[1:], value)
}
908+
849909
func updateAliases(index string, oldAliases, newAliases map[string]interface{}, meta interface{}) error {
850910
ctx := context.Background()
851911

0 commit comments

Comments
 (0)