Skip to content

Commit e0e1639

Browse files
authored
Merge branch 'main' into fix-changelog
Signed-off-by: Ankit Jain <[email protected]>
2 parents 87efd71 + 3db6175 commit e0e1639

File tree

29 files changed

+824
-82
lines changed

29 files changed

+824
-82
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
66
## [Unreleased 3.x]
77
### Added
88
- Use Lucene `pack` method for `half_float` and `unsigned_long` when using `ApproximatePointRangeQuery`.
9+
- Add a mapper for context aware segments grouping criteria ([#19233](https://github.com/opensearch-project/OpenSearch/pull/19233))
910

1011
### Changed
1112
- Refactor to move prepareIndex and prepareDelete methods to Engine class ([#19551](https://github.com/opensearch-project/OpenSearch/pull/19551))
1213

1314
### Fixed
15+
- Fix flaky test FieldDataLoadingIT.testIndicesFieldDataCacheSizeSetting ([#19571](https://github.com/opensearch-project/OpenSearch/pull/19571))
1416

1517
### Dependencies
1618
- Bump `org.apache.zookeeper:zookeeper` from 3.9.3 to 3.9.4 ([#19535](https://github.com/opensearch-project/OpenSearch/pull/19535))

TESTING.md

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,13 +80,16 @@ To run OpenSearch in debug mode,
8080

8181
This will instruct all JVMs (including any that run cli tools such as creating the keyring or adding users) to suspend and initiate a debug connection on port incrementing from `5005`. As such, the IDE needs to be instructed to listen for connections on this port. Since we might run multiple JVMs as part of configuring and starting the cluster, it's recommended to configure the IDE to initiate multiple listening attempts. In case of IntelliJ, this option is called "Auto restart" and needs to be checked. In case of Eclipse, the "Connection limit" setting needs to be configured with a greater value (i.e., 10 or more).
8282

83+
Alternatively, you can configure your OpenSearch JVM to listen as a debug server on port `5005`, and attach a debugger IDE once the OpenSearch JVM is up and running. Use `./gradlew run --debug-server-jvm` for this debugging setup.
84+
8385
### Other useful arguments
8486

8587
- In order to start a node with a different max heap space add: `-Dtests.heap.size=4G`
8688
- In order to disable assertions add: `-Dtests.asserts=false`
8789
- In order to use a custom data directory: `--data-dir=/tmp/foo`
8890
- In order to preserve data in between executions: `--preserve-data`
89-
- In order to remotely attach a debugger to the process: `--debug-jvm`
91+
- In order to start OpenSearch as a debug server and remotely attach a debugger client (like an IDE debugger): `--debug-server-jvm`
92+
- In order to start and attach the OpenSearch process to an existing debug server: `--debug-jvm`
9093
- In order to set a different keystore password: `--keystore-password yourpassword`
9194
- In order to set an OpenSearch setting, provide a setting with the following prefix: `-Dtests.opensearch.`
9295
- In order to enable stack trace of the MockSpanData during testing, add: `-Dtests.telemetry.span.stack_traces=true` (Storing stack traces alongside span data can be useful for comprehensive debugging and performance optimization during testing, as it provides insights into the exact code paths and execution sequences, facilitating efficient issue identification and resolution. Note: Enabling this might lead to OOM issues while running ITs)

modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@
5959
import org.opensearch.repositories.RepositoriesService;
6060
import org.opensearch.rest.RestController;
6161
import org.opensearch.rest.RestHandler;
62+
import org.opensearch.script.ContextAwareGroupingScript;
6263
import org.opensearch.script.DerivedFieldScript;
6364
import org.opensearch.script.IngestScript;
6465
import org.opensearch.script.ScoreScript;
@@ -120,6 +121,9 @@ public final class PainlessModulePlugin extends Plugin implements ScriptPlugin,
120121
derived.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.derived.txt"));
121122
map.put(DerivedFieldScript.CONTEXT, derived);
122123

124+
// Only basic painless support for ContextAwareGrouping script
125+
map.put(ContextAwareGroupingScript.CONTEXT, new ArrayList<>(Allowlist.BASE_ALLOWLISTS));
126+
123127
allowlists = map;
124128
}
125129

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.painless;

import org.opensearch.common.settings.Settings;
import org.opensearch.painless.spi.Allowlist;
import org.opensearch.script.ContextAwareGroupingScript;
import org.opensearch.script.ScriptContext;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Tests compiling and executing painless scripts in the
 * {@link ContextAwareGroupingScript#CONTEXT} script context, which is registered
 * with only the base allowlists (mirroring PainlessModulePlugin).
 */
public class ContextAwareGroupingScriptTests extends ScriptTestCase {

    // Instance field (was a mutable static assigned from setUp()): each test instance
    // gets its own engine, so state is never shared across instances or test runs.
    private PainlessScriptEngine scriptEngine;

    @Override
    public void setUp() throws Exception {
        super.setUp();

        // Register the context-aware-grouping context with only the base allowlists.
        Map<ScriptContext<?>, List<Allowlist>> contexts = newDefaultContexts();
        List<Allowlist> allowlists = new ArrayList<>(Allowlist.BASE_ALLOWLISTS);
        contexts.put(ContextAwareGroupingScript.CONTEXT, allowlists);

        scriptEngine = new PainlessScriptEngine(Settings.EMPTY, contexts);
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
        scriptEngine = null;
    }

    @Override
    protected PainlessScriptEngine getEngine() {
        return scriptEngine;
    }

    public void testContextAwareGroupingScript() {
        // String concatenation on a ctx value.
        String stringConcat = "ctx.value + \"-context-aware\"";
        ContextAwareGroupingScript script = compile(stringConcat);

        assertEquals("value-context-aware", script.execute(Map.of("value", "value")));

        // Integer arithmetic on a ctx value, converted back to a String.
        // (Renamed from "integerAddition": the script performs a division.)
        String integerDivision = "String.valueOf(ctx.value / 100)";
        ContextAwareGroupingScript integerDivisionScript = compile(integerDivision);
        assertEquals("2", integerDivisionScript.execute(Map.of("value", 200)));
    }

    /** Compiles {@code expression} in the context-aware-grouping context and returns a new instance. */
    private ContextAwareGroupingScript compile(String expression) {
        ContextAwareGroupingScript.Factory factory = getEngine().compile(
            expression,
            expression,
            ContextAwareGroupingScript.CONTEXT,
            Collections.emptyMap()
        );
        return factory.newInstance();
    }
}

modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/71_context_api.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
- do:
33
scripts_painless_context: {}
44
- match: { contexts.0: aggregation_selector}
5-
- match: { contexts.24: update}
5+
- match: { contexts.25: update}
66
---
77

88
"Action to get all API values for score context":

server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java

Lines changed: 18 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ public class FieldDataLoadingIT extends OpenSearchIntegTestCase {
5959
protected Settings nodeSettings(int nodeOrdinal) {
6060
return Settings.builder()
6161
.put(super.nodeSettings(nodeOrdinal))
62-
.put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1s")
62+
.put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms")
6363
.build();
6464
}
6565

@@ -134,6 +134,7 @@ public void testIndicesFieldDataCacheSizeSetting() throws Exception {
134134
}
135135

136136
private void createIndex(String index, int numFieldsPerIndex, String fieldPrefix) throws Exception {
137+
assert numFieldsPerIndex >= 1;
137138
XContentBuilder req = jsonBuilder().startObject().startObject("properties");
138139
for (int j = 0; j < numFieldsPerIndex; j++) {
139140
req.startObject(fieldPrefix + j).field("type", "text").field("fielddata", true).endObject();
@@ -146,6 +147,17 @@ private void createIndex(String index, int numFieldsPerIndex, String fieldPrefix
146147
}
147148
client().prepareIndex(index).setId("1").setSource(source).get();
148149
client().admin().indices().prepareRefresh(index).get();
150+
151+
// Put something into the cache and clear it, waiting for stats to return to 0.
152+
// Index creation temporarily opens + closes a test index using IndicesService.withTempIndexService()
153+
// that has the same name as the real index,
154+
// and this ensures the clear resulting from that close has completed before we go to the actual test.
155+
client().prepareSearch(index).setQuery(new MatchAllQueryBuilder()).addSort(fieldPrefix + "0", SortOrder.ASC).get();
156+
client().admin().indices().clearCache(new ClearIndicesCacheRequest().fieldDataCache(true)).actionGet();
157+
assertBusy(() -> {
158+
ClusterStatsResponse clearedResponse = client().admin().cluster().prepareClusterStats().get();
159+
assertEquals(0, clearedResponse.getIndicesStats().getFieldData().getMemorySizeInBytes());
160+
});
149161
}
150162

151163
public void testFieldDataCacheClearConcurrentIndices() throws Exception {
@@ -231,19 +243,12 @@ public void testFieldDataCacheClearConcurrentFields() throws Exception {
231243
private void createAndSearchIndices(int numIndices, int numFieldsPerIndex, String indexPrefix, String fieldPrefix) throws Exception {
232244
for (int i = 0; i < numIndices; i++) {
233245
String index = indexPrefix + i;
234-
XContentBuilder req = jsonBuilder().startObject().startObject("properties");
235-
for (int j = 0; j < numFieldsPerIndex; j++) {
236-
req.startObject(fieldPrefix + j).field("type", "text").field("fielddata", true).endObject();
237-
}
238-
req.endObject().endObject();
239-
assertAcked(prepareCreate(index).setMapping(req));
240-
Map<String, String> source = new HashMap<>();
241-
for (int j = 0; j < numFieldsPerIndex; j++) {
242-
source.put(fieldPrefix + j, "value");
243-
}
244-
client().prepareIndex(index).setId("1").setSource(source).get();
245-
client().admin().indices().prepareRefresh(index).get();
246+
createIndex(index, numFieldsPerIndex, fieldPrefix);
247+
}
248+
// Separate loop to ensure createIndex() handles any cache wipe from opening+closing the temporary test index
249+
for (int i = 0; i < numIndices; i++) {
246250
// Search on each index to fill the cache
251+
String index = indexPrefix + i;
247252
for (int j = 0; j < numFieldsPerIndex; j++) {
248253
client().prepareSearch(index).setQuery(new MatchAllQueryBuilder()).addSort(fieldPrefix + j, SortOrder.ASC).get();
249254
}

server/src/main/java/org/opensearch/index/mapper/CompositeMappedFieldType.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,8 @@ public CompositeMappedFieldType(String name, List<String> fields, CompositeField
5050
*/
5151
@ExperimentalApi
5252
public enum CompositeFieldType {
53-
STAR_TREE("star_tree");
53+
STAR_TREE("star_tree"),
54+
CONTEXT_AWARE_GROUPING("context_aware_grouping");
5455

5556
private final String name;
5657

Lines changed: 184 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,184 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.index.mapper;

import org.opensearch.script.ContextAwareGroupingScript;
import org.opensearch.script.Script;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static org.opensearch.script.Script.DEFAULT_SCRIPT_LANG;

/**
 * A field mapper for the {@code context_aware_grouping} mapping entry. The mapping is purely
 * declarative: it names the document field(s) used as the grouping criteria and an optional
 * painless script that derives the grouping value. Documents can never carry a value for this
 * field directly — see {@link #parseCreateField(ParseContext)}.
 *
 * @opensearch.internal
 */
public class ContextAwareGroupingFieldMapper extends ParametrizedFieldMapper {

    public static final String CONTENT_TYPE = "context_aware_grouping";

    public static final Mapper.TypeParser PARSER = new TypeParser();

    private static class TypeParser implements Mapper.TypeParser {

        // Parsing needs the enclosing object's builder so [fields] can be validated against
        // sibling properties; the overload without the ObjectMapper.Builder is therefore unsupported.
        @Override
        public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext context) throws MapperParsingException {
            throw new IllegalStateException("ContextAwareGroupingFieldMapper needs objbuilder to validate node");
        }

        @Override
        public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext context, ObjectMapper.Builder objBuilder)
            throws MapperParsingException {
            Builder builder = new Builder(name);
            builder.parse(name, context, node);

            // [fields] is mandatory; the script is optional.
            if (builder.fields.isConfigured() == false) {
                throw new MapperParsingException("[fields] in context_aware_grouping is required");
            }

            // Every referenced grouping field must be declared as a sibling property.
            Set<String> propertyFieldNames = (Set<String>) objBuilder.mappersBuilders.stream()
                .map(b -> ((Mapper.Builder) b).name())
                .collect(Collectors.toSet());

            if (propertyFieldNames.containsAll(builder.fields.getValue()) == false) {
                throw new MapperParsingException(
                    "[fields] should be from properties: [" + propertyFieldNames + "] but found [" + builder.fields.getValue() + "]"
                );
            }

            // Compile the optional script eagerly so an invalid script fails at mapping
            // time rather than at first use.
            final Script s = builder.script.getValue();
            if (s != null) {
                ContextAwareGroupingScript.Factory factory = context.scriptService()
                    .compile(builder.script.get(), ContextAwareGroupingScript.CONTEXT);
                builder.compiledScript = factory.newInstance();
            }
            return builder;
        }
    }

    /**
     * Builder for this field mapper.
     *
     * @opensearch.internal
     */
    public static class Builder extends ParametrizedFieldMapper.Builder {

        private static ContextAwareGroupingFieldMapper toType(FieldMapper in) {
            return (ContextAwareGroupingFieldMapper) in;
        }

        // [fields]: non-empty list of grouping criteria field names; currently limited to exactly one.
        private final Parameter<List<String>> fields = new Parameter<>("fields", true, Collections::emptyList, (n, c, o) -> {
            if (!(o instanceof List)) {
                throw new MapperParsingException("Expected [fields] to be a list of strings but got [" + o + "]");
            }

            List<String> fields = (List<String>) o;
            if (fields.isEmpty()) {
                throw new MapperParsingException("Expected [fields] in context_aware_grouping to have one value");
            }

            if (fields.size() > 1) {
                throw new MapperParsingException("Currently [fields] in context_aware_grouping does not support multiple values");
            }

            return fields;
        }, m -> toType(m).fields);

        // [script]: optional script deriving the grouping value; only the default (painless) lang is accepted.
        private final Parameter<Script> script = new Parameter<>("script", true, () -> null, (n, c, o) -> {
            if (o == null) {
                return null;
            }

            Script s = Script.parse(o);
            if (!s.getLang().equals(DEFAULT_SCRIPT_LANG)) {
                throw new MapperParsingException("context_aware_grouping only supports painless script");
            }
            return s;
        }, m -> toType(m).script).acceptsNull();

        // Set by TypeParser after compiling [script]; null when no script is configured.
        private ContextAwareGroupingScript compiledScript;

        /**
         * Creates a new Builder with a field name.
         *
         * @param name the name of the field being mapped
         */
        protected Builder(String name) {
            super(name);
        }

        /**
         * Creates a Builder pre-populated with existing values; used for mapping merges.
         *
         * @param name the name of the field being mapped
         * @param fields the grouping criteria field names
         * @param script the raw (uncompiled) grouping script, may be null
         * @param contextAwareGroupingScript the compiled grouping script, may be null
         */
        protected Builder(String name, List<String> fields, Script script, ContextAwareGroupingScript contextAwareGroupingScript) {
            super(name);
            this.fields.setValue(fields);
            this.script.setValue(script);
            this.compiledScript = contextAwareGroupingScript;
        }

        @Override
        protected List<Parameter<?>> getParameters() {
            return List.of(fields, script);
        }

        @Override
        public ParametrizedFieldMapper build(BuilderContext context) {
            final ContextAwareGroupingFieldType contextAwareGroupingFieldType = new ContextAwareGroupingFieldType(
                this.fields.getValue(),
                this.compiledScript
            );
            return new ContextAwareGroupingFieldMapper(name, contextAwareGroupingFieldType, this);
        }
    }

    private final List<String> fields;
    private final Script script;
    private final ContextAwareGroupingScript compiledScript;

    /**
     * Creates a new ContextAwareGroupingFieldMapper.
     *
     * @param simpleName the unqualified name of the field
     * @param mappedFieldType the field type carrying the grouping fields and compiled script
     * @param builder the builder this mapper is constructed from
     */
    protected ContextAwareGroupingFieldMapper(
        String simpleName,
        ContextAwareGroupingFieldType mappedFieldType,
        ContextAwareGroupingFieldMapper.Builder builder
    ) {
        super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty());
        this.fields = builder.fields.getValue();
        this.script = builder.script.getValue();
        this.compiledScript = builder.compiledScript;
    }

    @Override
    public Builder getMergeBuilder() {
        return new Builder(CONTENT_TYPE, this.fields, this.script, this.compiledScript);
    }

    // Configuration-only mapping: documents may never supply a value for this field.
    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        throw new MapperParsingException("context_aware_grouping cannot be ingested in the document");
    }

    public ContextAwareGroupingFieldType fieldType() {
        return (ContextAwareGroupingFieldType) mappedFieldType;
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }
}

0 commit comments

Comments
 (0)