From 214f929731da30cff846f0e2649cc6424b5a2d3b Mon Sep 17 00:00:00 2001
From: Shivansh Arora
Date: Mon, 2 Sep 2024 14:27:41 +0530
Subject: [PATCH] Change remote state setting conditions (#16)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Optimize global ordinal includes/excludes for prefix matching (#14371)

* Optimize global ordinal includes/excludes for prefix matching

If an aggregation specifies includes or excludes based on a regular expression, and the regular expression has a finite expansion followed by .*, then we can optimize the global ordinal filter. Specifically, in this case, we can expand the matching prefixes, then include/exclude the range of global ordinals that start with each prefix.

Signed-off-by: Michael Froh

* Add unit test
Signed-off-by: Michael Froh

* Add changelog entry
Signed-off-by: Michael Froh

* Improve test coverage
Updated the unit test to be functionally equivalent, but it covers more of the regex logic.
Signed-off-by: Michael Froh

* Improve test coverage
Signed-off-by: Michael Froh

* Fix bug in exclude-only case with no doc values in segment
Signed-off-by: Michael Froh

* Address comments from @mch2
Signed-off-by: Michael Froh

---------

Signed-off-by: Michael Froh

* Adding access to noSubMatches and noOverlappingMatches in Hyphenation… (#13895)

* Adding access to noSubMatches and noOverlappingMatches in HyphenationCompoundWordTokenFilter
Signed-off-by: Evan Kielley

* Add Changelog Entry
Signed-off-by: Mohammad Hasnain Mohsin Rajan

* test: add hyphenation decompounder tests
Signed-off-by: Mohammad Hasnain

* test: refactor tests
Signed-off-by: Mohammad Hasnain

* test: reformat test files
Signed-off-by: Mohammad Hasnain

* chore: add changelog entry for 2.X
Signed-off-by: Mohammad Hasnain

* chore: remove 3.x changelog
Signed-off-by: Mohammad Hasnain

* chore: commonify settingsarr
Signed-off-by: Mohammad Hasnain

* chore: commonify settingsarr
Signed-off-by: Mohammad Hasnain

* chore: linting
Signed-off-by: Mohammad Hasnain

---------

Signed-off-by: Evan Kielley
Signed-off-by: Mohammad Hasnain Mohsin Rajan
Signed-off-by: Mohammad Hasnain
Co-authored-by: Evan Kielley

* Add Settings related to Workload Management feature (#15028)

* add QueryGroup Service tests
Signed-off-by: Ruirui Zhang

* add PR to changelog
Signed-off-by: Ruirui Zhang

* change the test directory
Signed-off-by: Ruirui Zhang

* modify comments to be more specific
Signed-off-by: Ruirui Zhang

* add test coverage
Signed-off-by: Ruirui Zhang

* remove QUERY_GROUP_RUN_INTERVAL_SETTING as we'll define it in QueryGroupService
Signed-off-by: Ruirui Zhang

* address comments
Signed-off-by: Ruirui Zhang

* Update affiliation for @nknize. (#15322)
Signed-off-by: dblock

* Add log when download completes with file size (#15224)
Signed-off-by: Gaurav Bafna

* Support Filtering on Large List encoded by Bitmap (version update) (#15352)
Signed-off-by: Andriy Redko

* Add support for index level slice count setting (#15336)
Signed-off-by: Ganesh Ramadurai

* Adding allowlist setting for ingest-useragent and ingest-geoip processors (#15325)

* Adding allowlist setting for user-agent, geo-ip and updated tests for ingest-common.
Signed-off-by: Sarat Vemulapalli

* Remove duplicate test in ingest-common
Signed-off-by: Sarat Vemulapalli

* Adding changelog
Signed-off-by: Sarat Vemulapalli

---------

Signed-off-by: Sarat Vemulapalli

* Add Delete QueryGroup API Logic (#14735)

* Add Delete QueryGroup API Logic
Signed-off-by: Ruirui Zhang

* modify changelog
Signed-off-by: Ruirui Zhang

* include comments from create pr
Signed-off-by: Ruirui Zhang

* remove delete all
Signed-off-by: Ruirui Zhang

* rebase and address comments
Signed-off-by: Ruirui Zhang

* rebase
Signed-off-by: Ruirui Zhang

* address comments
Signed-off-by: Ruirui Zhang

* address comments
Signed-off-by: Ruirui Zhang

* address comments
Signed-off-by: Ruirui Zhang

* add UT coverage
Signed-off-by: Ruirui Zhang

* [Star Tree] Lucene Abstractions for Star Tree File Formats (#15278)

---------

Signed-off-by: Sarthak Aggarwal

* [Star tree] Changes to handle derived metrics such as avg as part of star tree mapping (#15152)

---------

Signed-off-by: Bharathwaj G

* relaxing the join validation for nodes which have remote store disabled but remote publication enabled

* relaxing the join validation for nodes which have remote store disabled but remote publication enabled
Signed-off-by: Rajiv Kumar Vaidyanathan

---------

Signed-off-by: Michael Froh
Signed-off-by: Evan Kielley
Signed-off-by: Mohammad Hasnain Mohsin Rajan
Signed-off-by: Mohammad Hasnain
Signed-off-by: dblock
Signed-off-by: Gaurav Bafna
Signed-off-by: Andriy Redko
Signed-off-by: Ganesh Ramadurai
Signed-off-by: Sarat Vemulapalli
Signed-off-by: Rajiv Kumar Vaidyanathan
Co-authored-by: Michael Froh
Co-authored-by: Mohammad Hasnain Mohsin Rajan
Co-authored-by: Evan Kielley
Co-authored-by: Ruirui Zhang
Co-authored-by: Daniel (dB.) Doubrovkine
Co-authored-by: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com>
Co-authored-by: Andriy Redko
Co-authored-by: Ganesh Krishna Ramadurai
Co-authored-by: Sarat Vemulapalli
Co-authored-by: Sarthak Aggarwal
Co-authored-by: Bharathwaj G
Co-authored-by: Rajiv Kumar Vaidyanathan
---
 CHANGELOG.md | 6 +
 MAINTAINERS.md | 2 +-
 ...enationCompoundWordTokenFilterFactory.java | 9 +-
 .../common/CompoundAnalysisTests.java | 63 +-
 .../opensearch/analysis/common/da_UTF8.xml | 1208 +++++++++++++++++
 .../ingest/geoip/IngestGeoIpModulePlugin.java | 40 +-
 .../geoip/IngestGeoIpModulePluginTests.java | 95 ++
 .../IngestUserAgentModulePlugin.java | 39 +-
 .../IngestUserAgentModulePluginTests.java | 114 ++
 .../plugin/wlm/WorkloadManagementPlugin.java | 15 +-
 .../wlm/WorkloadManagementPluginModule.java | 31 +
 .../wlm/action/DeleteQueryGroupAction.java | 38 +
 .../wlm/action/DeleteQueryGroupRequest.java | 65 +
 .../TransportCreateQueryGroupAction.java | 8 +-
 .../TransportDeleteQueryGroupAction.java | 91 ++
 .../wlm/rest/RestDeleteQueryGroupAction.java | 57 +
 .../service/QueryGroupPersistenceService.java | 52 +
 .../action/CreateQueryGroupResponseTests.java | 4 +-
 .../action/DeleteQueryGroupRequestTests.java | 42 +
 .../TransportDeleteQueryGroupActionTests.java | 63 +
 .../rest/RestDeleteQueryGroupActionTests.java | 85 ++
 .../QueryGroupPersistenceServiceTests.java | 58 +
 .../api/delete_query_group_context.json | 22 +
 .../rest-api-spec/test/wlm/10_query_group.yml | 6 +
 .../index/mapper/StarTreeMapperIT.java | 20 +-
 .../Lucene90DocValuesConsumerWrapper.java | 46 +
 .../Lucene90DocValuesProducerWrapper.java | 46 +
 .../SortedNumericDocValuesWriterWrapper.java | 53 +
 .../coordination/JoinTaskExecutor.java | 47 +-
 .../opensearch/cluster/metadata/Metadata.java | 13 +-
 .../metadata/RepositoriesMetadata.java | 18 +
 .../cluster/node/DiscoveryNode.java | 4 +-
 .../common/settings/ClusterSettings.java | 9 +-
 .../common/settings/IndexScopedSettings.java | 1 +
 .../org/opensearch/index/IndexSettings.java | 9 +
 .../composite/CompositeCodecFactory.java | 5 +
 .../LuceneDocValuesConsumerFactory.java | 50 +
 .../LuceneDocValuesProducerFactory.java | 60 +
 .../{ => composite99}/Composite99Codec.java | 2 +-
 .../Composite99DocValuesFormat.java | 2 +-
 .../Composite99DocValuesReader.java | 5 +-
 .../Composite99DocValuesWriter.java | 5 +-
 .../composite/composite99/package-info.java | 12 +
 .../compositeindex/datacube/MetricStat.java | 27 +-
 .../startree/StarTreeIndexSettings.java | 13 +-
 .../startree/builder/BaseStarTreeBuilder.java | 3 +
 .../index/mapper/StarTreeMapper.java | 48 +-
 .../index/query/TermsQueryBuilder.java | 4 +-
 .../store/RemoteStoreFileDownloader.java | 1 +
 .../org/opensearch/indices/TermsLookup.java | 4 +-
 .../remotestore/RemoteStoreNodeAttribute.java | 8 +
 .../search/DefaultSearchContext.java | 9 +-
 .../bucket/terms/IncludeExclude.java | 189 ++-
 .../wlm/WorkloadManagementSettings.java | 256 ++++
 .../services/org.apache.lucene.codecs.Codec | 2 +-
 ...ransportClusterManagerNodeActionTests.java | 2 +
 .../coordination/JoinTaskExecutorTests.java | 22 +-
 .../MetadataCreateIndexServiceTests.java | 9 +-
 .../allocation/FailedShardsRoutingTests.java | 3 +
 ...eStoreMigrationAllocationDeciderTests.java | 2 +
 .../opensearch/index/codec/CodecTests.java | 2 +-
 .../LuceneDocValuesConsumerFactoryTests.java | 85 ++
 .../LuceneDocValuesProducerFactoryTests.java | 124 ++
 ...tedNumericDocValuesWriterWrapperTests.java | 94 ++
 .../StarTreeDocValuesFormatTests.java | 2 +-
 .../index/mapper/StarTreeMapperTests.java | 183 ++-
 .../index/remote/RemoteStoreUtilsTests.java | 2 +
 .../opensearch/search/SearchServiceTests.java | 101 ++
 .../terms}/IncludeExcludeTests.java | 252 +++-
 .../wlm/WorkloadManagementSettingsTests.java | 241 ++++
 .../index/shard/IndexShardTestUtils.java | 2 +
 .../org/opensearch/analysis/common/test1.json | 30 +-
 .../org/opensearch/analysis/common/test1.yml | 18 +
 73 files changed, 4237 insertions(+), 121 deletions(-)
 create mode 100644 modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml
 create mode 100644 modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/IngestUserAgentModulePluginTests.java
 create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPluginModule.java
 create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupAction.java
 create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequest.java
 create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupAction.java
 create mode 100644 plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupAction.java
 create mode 100644 plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequestTests.java
 create mode 100644 plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupActionTests.java
 create mode 100644 plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupActionTests.java
 create mode 100644 plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/delete_query_group_context.json
 create mode 100644 server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java
 create mode 100644 server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesProducerWrapper.java
 create mode 100644 server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java
 create mode 100644 server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java
 create mode 100644 server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java
 rename server/src/main/java/org/opensearch/index/codec/composite/{ => composite99}/Composite99Codec.java (97%)
 rename server/src/main/java/org/opensearch/index/codec/composite/{ => composite99}/Composite99DocValuesFormat.java (97%)
 rename server/src/main/java/org/opensearch/index/codec/composite/{ => composite99}/Composite99DocValuesReader.java (91%)
 rename server/src/main/java/org/opensearch/index/codec/composite/{ => composite99}/Composite99DocValuesWriter.java (97%)
 create mode 100644 server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java
 create mode 100644 server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java
 create mode 100644 server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java
 create mode 100644 server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java
 create mode 100644 server/src/test/java/org/opensearch/index/codec/composite/SortedNumericDocValuesWriterWrapperTests.java
 rename server/src/test/java/org/opensearch/search/aggregations/{support => bucket/terms}/IncludeExcludeTests.java (58%)
 create mode 100644 server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9411393661838..cd02af4f625b9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,6 +9,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Fix for hasInitiatedFetching to fix allocation explain and manual reroute APIs ([#14972](https://github.com/opensearch-project/OpenSearch/pull/14972))
 - [Workload Management] Add queryGroupId to Task ([#14708](https://github.com/opensearch-project/OpenSearch/pull/14708))
 - Add setting to ignore throttling nodes for allocation of unassigned primaries in remote restore ([#14991](https://github.com/opensearch-project/OpenSearch/pull/14991))
+- [Workload Management] Add Delete QueryGroup API Logic ([#14735](https://github.com/opensearch-project/OpenSearch/pull/14735))
 - [Streaming Indexing] Enhance RestClient with a new streaming API support ([#14437](https://github.com/opensearch-project/OpenSearch/pull/14437))
 - Add basic aggregation support for derived fields ([#14618](https://github.com/opensearch-project/OpenSearch/pull/14618))
 - [Workload Management] Add Create QueryGroup API Logic ([#14680](https://github.com/opensearch-project/OpenSearch/pull/14680))
@@ -18,9 +19,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Add `rangeQuery` and `regexpQuery` for `constant_keyword` field type ([#14711](https://github.com/opensearch-project/OpenSearch/pull/14711))
 - Add took time to request nodes stats ([#15054](https://github.com/opensearch-project/OpenSearch/pull/15054))
 - [Workload Management] Add Get QueryGroup API Logic ([#14709](https://github.com/opensearch-project/OpenSearch/pull/14709))
+- [Workload Management] Add Settings for Workload Management feature ([#15028](https://github.com/opensearch-project/OpenSearch/pull/15028))
 - [Workload Management] QueryGroup resource tracking framework changes ([#13897](https://github.com/opensearch-project/OpenSearch/pull/13897))
 - Support filtering on a large list encoded by bitmap ([#14774](https://github.com/opensearch-project/OpenSearch/pull/14774))
 - Add slice execution listeners to SearchOperationListener interface ([#15153](https://github.com/opensearch-project/OpenSearch/pull/15153))
+- Add allowlist setting for ingest-geoip and ingest-useragent ([#15325](https://github.com/opensearch-project/OpenSearch/pull/15325))
+- Adding access to noSubMatches and noOverlappingMatches in Hyphenation ([#13895](https://github.com/opensearch-project/OpenSearch/pull/13895))
+- Add support for index level max slice count setting for concurrent segment search ([#15336](https://github.com/opensearch-project/OpenSearch/pull/15336))
 
 ### Dependencies
 - Bump `netty` from 4.1.111.Final to 4.1.112.Final ([#15081](https://github.com/opensearch-project/OpenSearch/pull/15081))
@@ -44,6 +49,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 
 ### Changed
 - Add lower limit for primary and replica batch allocators timeout ([#14979](https://github.com/opensearch-project/OpenSearch/pull/14979))
+- Optimize regexp-based include/exclude on aggregations when pattern matches prefixes ([#14371](https://github.com/opensearch-project/OpenSearch/pull/14371))
 - Replace and block usages of org.apache.logging.log4j.util.Strings ([#15238](https://github.com/opensearch-project/OpenSearch/pull/15238))
 
 ### Deprecated

diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 296c2c30aca49..4a8aa9305df74 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -22,7 +22,7 @@ This document contains a list of maintainers in this repo.
See [opensearch-proje | Varun Bansal | [linuxpi](https://github.com/linuxpi) | Amazon | | Marc Handalian | [mch2](https://github.com/mch2) | Amazon | | Michael Froh | [msfroh](https://github.com/msfroh) | Amazon | -| Nick Knize | [nknize](https://github.com/nknize) | Amazon | +| Nick Knize | [nknize](https://github.com/nknize) | Lucenia | | Owais Kazi | [owaiskazi19](https://github.com/owaiskazi19) | Amazon | | Peter Nied | [peternied](https://github.com/peternied) | Amazon | | Rishikesh Pasham | [Rishikesh1159](https://github.com/Rishikesh1159) | Amazon | diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index 8d29a347caeb8..181ebe5500ee5 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -54,11 +54,16 @@ */ public class HyphenationCompoundWordTokenFilterFactory extends AbstractCompoundWordTokenFilterFactory { + private final boolean noSubMatches; + private final boolean noOverlappingMatches; private final HyphenationTree hyphenationTree; HyphenationCompoundWordTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, env, name, settings); + noSubMatches = settings.getAsBoolean("no_sub_matches", false); + noOverlappingMatches = settings.getAsBoolean("no_overlapping_matches", false); + String hyphenationPatternsPath = settings.get("hyphenation_patterns_path", null); if (hyphenationPatternsPath == null) { throw new IllegalArgumentException("hyphenation_patterns_path is a required setting."); @@ -85,7 +90,9 @@ public TokenStream create(TokenStream tokenStream) { minWordSize, minSubwordSize, maxSubwordSize, - onlyLongestMatch + onlyLongestMatch, + noSubMatches, + noOverlappingMatches ); } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java index a681d9a104ecf..bfe30da50c055 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CompoundAnalysisTests.java @@ -50,8 +50,12 @@ import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.MatcherAssert; +import org.junit.Before; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -63,17 +67,27 @@ import static org.hamcrest.Matchers.instanceOf; public class CompoundAnalysisTests extends OpenSearchTestCase { + + Settings[] settingsArr; + + @Before + public void initialize() throws IOException { + final Path home = createTempDir(); + copyHyphenationPatternsFile(home); + this.settingsArr = new Settings[] { getJsonSettings(home), getYamlSettings(home) }; + } + public void testDefaultsCompoundAnalysis() throws Exception { - Settings settings = getJsonSettings(); - IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); - AnalysisModule analysisModule = 
createAnalysisModule(settings); - TokenFilterFactory filterFactory = analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec"); - MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); + for (Settings settings : this.settingsArr) { + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); + AnalysisModule analysisModule = createAnalysisModule(settings); + TokenFilterFactory filterFactory = analysisModule.getAnalysisRegistry().buildTokenFilterFactories(idxSettings).get("dict_dec"); + MatcherAssert.assertThat(filterFactory, instanceOf(DictionaryCompoundWordTokenFilterFactory.class)); + } } public void testDictionaryDecompounder() throws Exception { - Settings[] settingsArr = new Settings[] { getJsonSettings(), getYamlSettings() }; - for (Settings settings : settingsArr) { + for (Settings settings : this.settingsArr) { List terms = analyze(settings, "decompoundingAnalyzer", "donaudampfschiff spargelcremesuppe"); MatcherAssert.assertThat(terms.size(), equalTo(8)); MatcherAssert.assertThat( @@ -83,6 +97,26 @@ public void testDictionaryDecompounder() throws Exception { } } + // Hyphenation Decompounder tests mimic the behavior of lucene tests + // lucene/analysis/common/src/test/org/apache/lucene/analysis/compound/TestHyphenationCompoundWordTokenFilterFactory.java + public void testHyphenationDecompounder() throws Exception { + for (Settings settings : this.settingsArr) { + List terms = analyze(settings, "hyphenationAnalyzer", "min veninde som er lidt af en læsehest"); + MatcherAssert.assertThat(terms.size(), equalTo(10)); + MatcherAssert.assertThat(terms, hasItems("min", "veninde", "som", "er", "lidt", "af", "en", "læsehest", "læse", "hest")); + } + } + + // Hyphenation Decompounder tests mimic the behavior of lucene tests + // lucene/analysis/common/src/test/org/apache/lucene/analysis/compound/TestHyphenationCompoundWordTokenFilterFactory.java + public void testHyphenationDecompounderNoSubMatches() throws Exception { + for (Settings settings : this.settingsArr) { + List terms = analyze(settings, "hyphenationAnalyzerNoSubMatches", "basketballkurv"); + MatcherAssert.assertThat(terms.size(), equalTo(3)); + MatcherAssert.assertThat(terms, hasItems("basketballkurv", "basketball", "kurv")); + } + } + private List analyze(Settings settings, String analyzerName, String text) throws IOException { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings); AnalysisModule analysisModule = createAnalysisModule(settings); @@ -111,21 +145,28 @@ public Map> getTokenFilters() { })); } - private Settings getJsonSettings() throws IOException { + private void copyHyphenationPatternsFile(Path home) throws IOException { + InputStream hyphenation_patterns_path = getClass().getResourceAsStream("da_UTF8.xml"); + Path config = home.resolve("config"); + Files.createDirectory(config); + Files.copy(hyphenation_patterns_path, config.resolve("da_UTF8.xml")); + } + + private Settings getJsonSettings(Path home) throws IOException { String json = "/org/opensearch/analysis/common/test1.json"; return Settings.builder() .loadFromStream(json, getClass().getResourceAsStream(json), false) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } - private Settings getYamlSettings() throws IOException { + private Settings getYamlSettings(Path home) 
throws IOException { String yaml = "/org/opensearch/analysis/common/test1.yml"; return Settings.builder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), home.toString()) .build(); } } diff --git a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml new file mode 100644 index 0000000000000..2c8d203be6881 --- /dev/null +++ b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/da_UTF8.xml @@ -0,0 +1,1208 @@ + + + + + + + + + + +aA +bB +cC +dD +eE +fF +gG +hH +iI +jJ +kK +lL +mM +nN +oO +pP +qQ +rR +sS +tT +uU +vV +wW +xX +yY +zZ +æÆ +øØ +åÅ + + + +.ae3 +.an3k +.an1s +.be5la +.be1t +.bi4tr +.der3i +.diagno5 +.her3 +.hoved3 +.ne4t5 +.om1 +.ove4 +.po1 +.til3 +.yd5r +ab5le +3abst +a3c +ade5la +5adg +a1e +5afg +5a4f1l +af3r +af4ri +5afs +a4gef +a4gi +ag5in +ag5si +3agti +a4gy +a3h +ais5t +a3j +a5ka +a3ke +a5kr +aku5 +a3la +a1le +a1li +al3k +4alkv +a1lo +al5si +a3lu +a1ly +am4pa +3analy +an4k5r +a3nu +3anv +a5o +a5pe +a3pi +a5po +a1ra +ar5af +1arb +a1re +5arg +a1ri +a3ro +a3sa +a3sc +a1si +a3sk +a3so +3a3sp +a3ste +a3sti +a1ta1 +a1te +a1ti +a4t5in +a1to +ato5v +a5tr +a1tu +a5va +a1ve +a5z +1ba +ba4ti +4bd +1be +be1k +be3ro +be5ru +be1s4 +be1tr +1bi +bi5sk +b1j +4b1n +1bo +bo4gr +bo3ra +bo5re +1br4 +4bs +bs5k +b3so +b1st +b5t +3bu +bu4s5tr +b5w +1by +by5s +4c1c +1ce +ce5ro +3ch +4ch. +ci4o +ck3 +5cy +3da +4d3af +d5anta +da4s +d1b +d1d4 +1de +de5d +4de4lem +der5eri +de4rig +de5sk +d1f +d1g +d3h +1di +di1e +di5l +d3j +d1k +d1l +d1m +4d1n +3do +4dop +d5ov +d1p +4drett +5d4reve +3drif +3driv +d5ros +d5ru +ds5an +ds5in +d1ski +d4sm +d4su +dsu5l +ds5vi +d3ta +d1te +dt5o +d5tr +dt5u +1du +dub5 +d1v +3dy +e5ad +e3af +e5ag +e3ak +e1al +ea4la +e3an +e5ap +e3at +e3bl +ebs3 +e1ci +ed5ar +edde4 +eddel5 +e4do +ed5ra +ed3re +ed3rin +ed4str +e3e +3eff +e3fr +3eft +e3gu +e1h +e3in +ei5s +e3je +e4j5el +e1ka +e3ke +e3kl +4e1ko +e5kr +ek5sa +3eksem +3eksp +e3ku +e1kv +e5ky +e3lad +el3ak +el3ar +e1las +e3le +e4lek +3elem +e1li +5elim +e3lo +el5sa +e5lu +e3ly +e4mad +em4p5le +em1s +en5ak +e4nan +4enn +e4no +en3so +e5nu +e5ol +e3op +e1or +e3ov +epi3 +e1pr +e3ra +er3af +e4rag +e4rak +e1re +e4ref +er5ege +5erhv +e1ri +e4rib +er1k +ero5d +er5ov +er3s +er5tr +e3rum +er5un +e5ry +e1ta +e1te +etek4s +e1ti +e3tj +e1to +e3tr +e3tu +e1ty +e3um +e3un +3eur +e1va +e3ve +e4v3erf +e1vi +e5x +1fa +fa4ce +fags3 +f1b +f1d +1fe +fej4 +fejl1 +f1f +f1g +f1h +1fi +f1k +3fl +1fo +for1en +fo4ri +f1p +f1s4 +4ft +f3ta +f1te +f1ti +f5to +f5tvi +1fu +f1v +3fy +1ga +g3art +g1b +g1d +1ge +4g5enden +ger3in +ge3s +g3f +g1g +g1h +1gi +gi4b +gi3st +5gj +g3k +g1l +g1m +3go +4g5om +g5ov +g3p +1gr +gs1a +gsde4len +g4se +gsha4 +g5sla +gs3or +gs1p +g5s4tide +g4str +gs1v +g3ta +g1te +g1ti +g5to +g3tr +gt4s +g3ud +gun5 +g3v +1gy +g5yd +4ha. +heds3 +he5s +4het +hi4e +hi4n5 +hi3s +ho5ko +ho5ve +4h3t +hun4 +hund3 +hvo4 +i1a +i3b +i4ble +i1c +i3dr +ids5k +i1el +i1en +i3er +i3et. 
+if3r +i3gu +i3h +i5i +i5j +i1ka +i1ke +ik1l +i5ko +ik3re +ik5ri +iks5t +ik4tu +i3ku +ik3v +i3lag +il3eg +il5ej +il5el +i3li +i4l5id +il3k +i1lo +il5u +i3mu +ind3t +5inf +ings1 +in3s +in4sv +inter1 +i3nu +i3od +i3og +i5ok +i3ol +ion4 +ions1 +i5o5r +i3ot +i5pi +i3pli +i5pr +i3re +i3ri +ir5t +i3sc +i3si +i4sm +is3p +i1ster +i3sti +i5sua +i1ta +i1te +i1ti +i3to +i3tr +it5re. +i1tu +i3ty +i1u +i1va +i1ve +i1vi +j3ag +jde4rer +jds1 +jek4to +4j5en. +j5k +j3le +j3li +jlmeld5 +jlmel4di +j3r +jre5 +ju3s +5kap +k5au +5kav +k5b +kel5s +ke3sk +ke5st +ke4t5a +k3h +ki3e +ki3st +k1k +k5lak +k1le +3klu +k4ny +5kod +1kon +ko3ra +3kort +ko3v +1kra +5kry +ks3an +k1si +ks3k +ks1p +k3ste +k5stu +ks5v +k1t +k4tar +k4terh +kti4e +kt5re +kt5s +3kur +1kus +3kut +k4vo +k4vu +5lab +lad3r +5lagd +la4g3r +5lam +1lat +l1b +ldiagnos5 +l3dr +ld3st +1le. +5led +4lele +le4mo +3len +1ler +1les +4leu +l1f +lfin4 +lfind5 +l1go1 +l3h +li4ga +4l5ins +4l3int +li5o +l3j +l1ke +l1ko +l3ky +l1l +l5mu +lo4du +l3op +4l5or +3lov +4l3p +l4ps +l3r +4ls +lses1 +ls5in +l5sj +l1ta +l4taf +l1te +l4t5erf +l3ti +lt3o +l3tr +l3tu +lu5l +l3ve +l3vi +1ma +m1b +m3d +1me +4m5ej +m3f +m1g +m3h +1mi +mi3k +m5ing +mi4o +mi5sty +m3k +m1l +m1m +mmen5 +m1n +3mo +mo4da +4mop +4m5ov +m1pe +m3pi +m3pl +m1po +m3pr +m1r +mse5s +ms5in +m5sk +ms3p +m3ste +ms5v +m3ta +m3te +m3ti +m3tr +m1ud +1mul +mu1li +3my +3na +4nak +1nal +n1b +n1c +4nd +n3dr +nd5si +nd5sk +nd5sp +1ne +ne5a +ne4da +nemen4 +nement5e +neo4 +n3erk +n5erl +ne5sl +ne5st +n1f +n4go +4n1h +1ni +4nim +ni5o +ni3st +n1ke +n1ko +n3kr +n3ku +n5kv +4n1l +n1m +n1n +1no +n3ord +n5p +n3r +4ns +n3si +n1sku +ns3po +n1sta +n5sti +n1ta +nta4le +n1te +n1ti +ntiali4 +n3to +n1tr +nt4s5t +nt4su +n3tu +n3ty +4n1v +3ny +n3z +o3a +o4as +ob3li +o1c +o4din +od5ri +od5s +od5un +o1e +of5r +o4gek +o4gel +o4g5o +og5re +og5sk +o5h +o5in +oi6s5e +o1j +o3ka +o1ke +o3ku +o3la +o3le +o1li +o1lo +o3lu +o5ly +1omr +on3k +ook5 +o3or +o5ov +o3pi +op3l +op3r +op3s +3opta +4or. +or1an +3ordn +ord5s +o3re. +o3reg +o3rek +o3rer +o3re3s +o3ret +o3ri +3orient +or5im +o4r5in +or3k +or5o +or3sl +or3st +o3si +o3so +o3t +o1te +o5un +ov4s +3pa +pa5gh +p5anl +p3d +4pec +3pen +1per +pe1ra +pe5s +pe3u +p3f +4p5h +1pla +p4lan +4ple. +4pler +4ples +p3m +p3n +5pok +4po3re +3pot +4p5p4 +p4ro +1proc +p3sk +p5so +ps4p +p3st +p1t +1pu +pu5b +p5ule +p5v +5py3 +qu4 +4raf +ra5is +4rarb +r1b +r4d5ar +r3dr +rd4s3 +4reks +1rel +re5la +r5enss +5rese +re5spo +4ress +re3st +re5s4u +5rett +r1f +r1gu +r1h +ri1e +ri5la +4rimo +r4ing +ringse4 +ringso4r +4rinp +4rint +r3ka +r1ke +r1ki +rk3so +r3ku +r1l +rmo4 +r5mu +r1n +ro1b +ro3p +r3or +r3p +r1r +rre5s +rro4n5 +r1sa +r1si +r5skr +r4sk5v +rs4n +r3sp +r5stu +r5su +r3sv +r5tal +r1te +r4teli +r1ti +r3to +r4t5or +rt5rat +rt3re +r5tri +r5tro +rt3s +r5ty +r3ud +run4da +5rut +r3va +r1ve +r3vi +ry4s +s3af +1sam +sa4ma +s3ap +s1ar +1sat +4s1b +s1d +sdy4 +1se +s4ed +5s4er +se4se +s1f +4s1g4 +4s3h +si4bl +1sig +s5int +5sis +5sit +5siu +s5ju +4sk. +1skab +1ske +s3kl +sk5s4 +5sky +s1le +s1li +slo3 +5slu +s5ly +s1m +s4my +4snin +s4nit +so5k +5sol +5som. +3somm +s5oms +5somt +3son +4s1op +sp4 +3spec +4sper +3s4pi +s1pl +3sprog. +s5r4 +s1s4 +4st. +5s4tam +1stan +st5as +3stat +1stav +1ste. +1sted +3stel +5stemo +1sten +5step +3ster. +3stes +5stet +5stj +3sto +st5om +1str +s1ud +3sul +s3un +3sur +s3ve +3s4y +1sy1s +5ta. 
+1tag +tands3 +4tanv +4tb +tede4l +teds5 +3teg +5tekn +teo1 +5term +te5ro +4t1f +6t3g +t1h +tialis5t +3tid +ti4en +ti3st +4t3k +4t1l +tli4s5 +t1m +t1n +to5ra +to1re +to1ri +tor4m +4t3p +t4ra +4tres +tro5v +1try +4ts +t3si +ts4pa +ts5pr +t3st +ts5ul +4t1t +t5uds +5tur +t5ve +1typ +u1a +5udl +ud5r +ud3s +3udv +u1e +ue4t5 +uge4ri +ugs3 +u5gu +u3i +u5kl +uk4ta +uk4tr +u1la +u1le +u5ly +u5pe +up5l +u5q +u3ra +u3re +u4r3eg +u1rer +u3ro +us5a +u3si +u5ska +u5so +us5v +u1te +u1ti +u1to +ut5r +ut5s4 +5u5v +va5d +3varm +1ved +ve4l5e +ve4reg +ve3s +5vet +v5h +vi4l3in +1vis +v5j +v5k +vl4 +v3le +v5li +vls1 +1vo +4v5om +v5p +v5re +v3st +v5su +v5t +3vu +y3a +y5dr +y3e +y3ke +y5ki +yk3li +y3ko +yk4s5 +y3kv +y5li +y5lo +y5mu +yns5 +y5o +y1pe +y3pi +y3re +yr3ek +y3ri +y3si +y3ti +y5t3r +y5ve +zi5o + +.så3 +.ær5i +.øv3r +a3tø +a5væ +brød3 +5bæ +5drøv +dstå4 +3dæ +3dø +e3læ +e3lø +e3rø +er5øn +e5tæ +e5tø +e1væ +e3æ +e5å +3fæ +3fø +fø4r5en +giø4 +g4sø +g5så +3gæ +3gø1 +3gå +i5tæ +i3ø +3kø +3kå +lingeniø4 +l3væ +5løs +m5tå +1mæ +3mø +3må +n3kæ +n5tæ +3næ +4n5æb +5nø +o5læ +or3ø +o5å +5præ +5pæd +på3 +r5kæ +r5tæ +r5tø +r3væ +r5æl +4røn +5rør +3råd +r5år +s4kå +3slå +s4næ +5stø +1stå +1sæ +4s5æn +1sø +s5øk +så4r5 +ti4ø +3træk. +t4sø +t5så +t3væ +u3læ +3værd +1værk +5vå +y5væ +æb3l +æ3c +æ3e +æg5a +æ4gek +æ4g5r +ægs5 +æ5i +æ5kv +ælle4 +æn1dr +æ5o +æ1re +ær4g5r +æ3ri +ær4ma +ær4mo +ær5s +æ5si +æ3so +æ3ste +æ3ve +øde5 +ø3e +ø1je +ø3ke +ø3le +øms5 +øn3st +øn4t3 +ø1re +ø3ri +ørne3 +ør5o +ø1ve +å1d +å1e +å5h +å3l +å3re +års5t +å5sk +å3t + + diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpModulePlugin.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpModulePlugin.java index 524d1719c37c0..742b923f05199 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpModulePlugin.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/IngestGeoIpModulePlugin.java @@ -44,6 +44,7 @@ import org.opensearch.common.cache.CacheBuilder; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; import org.opensearch.common.util.io.IOUtils; import org.opensearch.ingest.Processor; import org.opensearch.plugins.IngestPlugin; @@ -62,10 +63,18 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.Stream; public class IngestGeoIpModulePlugin extends Plugin implements IngestPlugin, Closeable { + static final Setting> PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting( + "ingest.geoip.processors.allowed", + List.of(), + Function.identity(), + Setting.Property.NodeScope + ); public static final Setting CACHE_SIZE = Setting.longSetting("ingest.geoip.cache_size", 1000, 0, Setting.Property.NodeScope); static String[] DEFAULT_DATABASE_FILENAMES = new String[] { "GeoLite2-ASN.mmdb", "GeoLite2-City.mmdb", "GeoLite2-Country.mmdb" }; @@ -74,7 +83,7 @@ public class IngestGeoIpModulePlugin extends Plugin implements IngestPlugin, Clo @Override public List> getSettings() { - return Arrays.asList(CACHE_SIZE); + return Arrays.asList(CACHE_SIZE, PROCESSORS_ALLOWLIST_SETTING); } @Override @@ -90,7 +99,10 @@ public Map getProcessors(Processor.Parameters paramet } catch (IOException e) { throw new RuntimeException(e); } - return Collections.singletonMap(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory(databaseReaders, new 
GeoIpCache(cacheSize))); + return filterForAllowlistSetting( + parameters.env.settings(), + Map.of(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(cacheSize))) + ); } /* @@ -175,6 +187,30 @@ public void close() throws IOException { } } + private Map filterForAllowlistSetting(Settings settings, Map map) { + if (PROCESSORS_ALLOWLIST_SETTING.exists(settings) == false) { + return Map.copyOf(map); + } + final Set allowlist = Set.copyOf(PROCESSORS_ALLOWLIST_SETTING.get(settings)); + // Assert that no unknown processors are defined in the allowlist + final Set unknownAllowlistProcessors = allowlist.stream() + .filter(p -> map.containsKey(p) == false) + .collect(Collectors.toUnmodifiableSet()); + if (unknownAllowlistProcessors.isEmpty() == false) { + throw new IllegalArgumentException( + "Processor(s) " + + unknownAllowlistProcessors + + " were defined in [" + + PROCESSORS_ALLOWLIST_SETTING.getKey() + + "] but do not exist" + ); + } + return map.entrySet() + .stream() + .filter(e -> allowlist.contains(e.getKey())) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + /** * The in-memory cache for the geoip data. There should only be 1 instance of this class.. * This cache differs from the maxmind's {@link NodeCache} such that this cache stores the deserialized Json objects to avoid the diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpModulePluginTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpModulePluginTests.java index f6d42d1d65670..9446ec1228532 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpModulePluginTests.java +++ b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/IngestGeoIpModulePluginTests.java @@ -35,8 +35,20 @@ import com.maxmind.geoip2.model.AbstractResponse; import org.opensearch.common.network.InetAddresses; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.TestEnvironment; +import org.opensearch.ingest.Processor; import org.opensearch.ingest.geoip.IngestGeoIpModulePlugin.GeoIpCache; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.StreamsUtils; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Set; import static org.mockito.Mockito.mock; @@ -77,4 +89,87 @@ public void testInvalidInit() { IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new GeoIpCache(-1)); assertEquals("geoip max cache size must be 0 or greater", ex.getMessage()); } + + public void testAllowList() throws IOException { + runAllowListTest(List.of()); + runAllowListTest(List.of("geoip")); + } + + public void testInvalidAllowList() throws IOException { + List invalidAllowList = List.of("set"); + Settings.Builder settingsBuilder = Settings.builder() + .putList(IngestGeoIpModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), invalidAllowList); + createDb(settingsBuilder); + try (IngestGeoIpModulePlugin plugin = new IngestGeoIpModulePlugin()) { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> plugin.getProcessors(createParameters(settingsBuilder.build())) + ); + assertEquals( + "Processor(s) " + + invalidAllowList + + " were defined in [" + + IngestGeoIpModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey() + + "] but do not exist", + e.getMessage() + ); + } + 
} + + public void testAllowListNotSpecified() throws IOException { + Settings.Builder settingsBuilder = Settings.builder(); + settingsBuilder.remove(IngestGeoIpModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey()); + createDb(settingsBuilder); + try (IngestGeoIpModulePlugin plugin = new IngestGeoIpModulePlugin()) { + final Set expected = Set.of("geoip"); + assertEquals(expected, plugin.getProcessors(createParameters(settingsBuilder.build())).keySet()); + } + } + + private void runAllowListTest(List allowList) throws IOException { + Settings.Builder settingsBuilder = Settings.builder(); + createDb(settingsBuilder); + final Settings settings = settingsBuilder.putList(IngestGeoIpModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), allowList).build(); + try (IngestGeoIpModulePlugin plugin = new IngestGeoIpModulePlugin()) { + assertEquals(Set.copyOf(allowList), plugin.getProcessors(createParameters(settings)).keySet()); + } + } + + private void createDb(Settings.Builder settingsBuilder) throws IOException { + Path configDir = createTempDir(); + Path userAgentConfigDir = configDir.resolve("ingest-geoip"); + Files.createDirectories(userAgentConfigDir); + settingsBuilder.put("ingest.geoip.database_path", configDir).put("path.home", configDir); + try { + Files.copy( + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")), + configDir.resolve("GeoLite2-City.mmdb") + ); + Files.copy( + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")), + configDir.resolve("GeoLite2-Country.mmdb") + ); + Files.copy( + new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), + configDir.resolve("GeoLite2-ASN.mmdb") + ); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + private static Processor.Parameters createParameters(Settings settings) { + return new Processor.Parameters( + TestEnvironment.newEnvironment(settings), + null, + null, + null, + () -> 0L, + (a, b) -> null, + null, + null, + $ -> {}, + null + ); + } } diff --git a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentModulePlugin.java b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentModulePlugin.java index cd96fcf2347ef..bac90d20b44e1 100644 --- a/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentModulePlugin.java +++ b/modules/ingest-user-agent/src/main/java/org/opensearch/ingest/useragent/IngestUserAgentModulePlugin.java @@ -33,6 +33,7 @@ package org.opensearch.ingest.useragent; import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; import org.opensearch.ingest.Processor; import org.opensearch.plugins.IngestPlugin; import org.opensearch.plugins.Plugin; @@ -47,10 +48,19 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.Stream; public class IngestUserAgentModulePlugin extends Plugin implements IngestPlugin { + static final Setting> PROCESSORS_ALLOWLIST_SETTING = Setting.listSetting( + "ingest.useragent.processors.allowed", + List.of(), + Function.identity(), + Setting.Property.NodeScope + ); private final Setting CACHE_SIZE_SETTING = Setting.longSetting( "ingest.user_agent.cache_size", 1000, @@ -77,7 +87,34 @@ public Map getProcessors(Processor.Parameters paramet } catch (IOException e) { throw new RuntimeException(e); } - return 
Collections.singletonMap(UserAgentProcessor.TYPE, new UserAgentProcessor.Factory(userAgentParsers)); + return filterForAllowlistSetting( + parameters.env.settings(), + Collections.singletonMap(UserAgentProcessor.TYPE, new UserAgentProcessor.Factory(userAgentParsers)) + ); + } + + private Map filterForAllowlistSetting(Settings settings, Map map) { + if (PROCESSORS_ALLOWLIST_SETTING.exists(settings) == false) { + return Map.copyOf(map); + } + final Set allowlist = Set.copyOf(PROCESSORS_ALLOWLIST_SETTING.get(settings)); + // Assert that no unknown processors are defined in the allowlist + final Set unknownAllowlistProcessors = allowlist.stream() + .filter(p -> map.containsKey(p) == false) + .collect(Collectors.toUnmodifiableSet()); + if (unknownAllowlistProcessors.isEmpty() == false) { + throw new IllegalArgumentException( + "Processor(s) " + + unknownAllowlistProcessors + + " were defined in [" + + PROCESSORS_ALLOWLIST_SETTING.getKey() + + "] but do not exist" + ); + } + return map.entrySet() + .stream() + .filter(e -> allowlist.contains(e.getKey())) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); } static Map createUserAgentParsers(Path userAgentConfigDirectory, UserAgentCache cache) throws IOException { diff --git a/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/IngestUserAgentModulePluginTests.java b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/IngestUserAgentModulePluginTests.java new file mode 100644 index 0000000000000..31fdafff1188a --- /dev/null +++ b/modules/ingest-user-agent/src/test/java/org/opensearch/ingest/useragent/IngestUserAgentModulePluginTests.java @@ -0,0 +1,114 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.useragent; + +import org.opensearch.common.settings.Settings; +import org.opensearch.env.TestEnvironment; +import org.opensearch.ingest.Processor; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Set; + +public class IngestUserAgentModulePluginTests extends OpenSearchTestCase { + private Settings.Builder settingsBuilder; + + @Before + public void setup() throws IOException { + Path configDir = createTempDir(); + Path userAgentConfigDir = configDir.resolve("ingest-user-agent"); + Files.createDirectories(userAgentConfigDir); + settingsBuilder = Settings.builder().put("ingest-user-agent", configDir).put("path.home", configDir); + + // Copy file, leaving out the device parsers at the end + String regexWithoutDevicesFilename = "regexes_without_devices.yml"; + try ( + BufferedReader reader = new BufferedReader( + new InputStreamReader(UserAgentProcessor.class.getResourceAsStream("/regexes.yml"), StandardCharsets.UTF_8) + ); + BufferedWriter writer = Files.newBufferedWriter(userAgentConfigDir.resolve(regexWithoutDevicesFilename)); + ) { + String line; + while ((line = reader.readLine()) != null) { + if (line.startsWith("device_parsers:")) { + break; + } + + writer.write(line); + writer.newLine(); + } + } + } + + public void testAllowList() throws IOException { + runAllowListTest(List.of()); + runAllowListTest(List.of("user_agent")); + } + + public void testInvalidAllowList() throws IOException { + List invalidAllowList = List.of("set"); + final Settings settings = settingsBuilder.putList( + IngestUserAgentModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), + invalidAllowList + ).build(); + try (IngestUserAgentModulePlugin plugin = new IngestUserAgentModulePlugin()) { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> plugin.getProcessors(createParameters(settings)) + ); + assertEquals( + "Processor(s) " + + invalidAllowList + + " were defined in [" + + IngestUserAgentModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey() + + "] but do not exist", + e.getMessage() + ); + } + } + + public void testAllowListNotSpecified() throws IOException { + settingsBuilder.remove(IngestUserAgentModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey()); + try (IngestUserAgentModulePlugin plugin = new IngestUserAgentModulePlugin()) { + final Set expected = Set.of("user_agent"); + assertEquals(expected, plugin.getProcessors(createParameters(settingsBuilder.build())).keySet()); + } + } + + private void runAllowListTest(List allowList) throws IOException { + final Settings settings = settingsBuilder.putList(IngestUserAgentModulePlugin.PROCESSORS_ALLOWLIST_SETTING.getKey(), allowList) + .build(); + try (IngestUserAgentModulePlugin plugin = new IngestUserAgentModulePlugin()) { + assertEquals(Set.copyOf(allowList), plugin.getProcessors(createParameters(settings)).keySet()); + } + } + + private static Processor.Parameters createParameters(Settings settings) { + return new Processor.Parameters( + TestEnvironment.newEnvironment(settings), + null, + null, + null, + () -> 0L, + (a, b) -> null, + null, + null, + $ -> {}, + null + ); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java 
b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java index 6b4496af76dc3..64f510fa1db67 100644 --- a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPlugin.java @@ -11,6 +11,7 @@ import org.opensearch.action.ActionRequest; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.common.inject.Module; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Setting; @@ -18,10 +19,13 @@ import org.opensearch.common.settings.SettingsFilter; import org.opensearch.core.action.ActionResponse; import org.opensearch.plugin.wlm.action.CreateQueryGroupAction; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupAction; import org.opensearch.plugin.wlm.action.GetQueryGroupAction; import org.opensearch.plugin.wlm.action.TransportCreateQueryGroupAction; +import org.opensearch.plugin.wlm.action.TransportDeleteQueryGroupAction; import org.opensearch.plugin.wlm.action.TransportGetQueryGroupAction; import org.opensearch.plugin.wlm.rest.RestCreateQueryGroupAction; +import org.opensearch.plugin.wlm.rest.RestDeleteQueryGroupAction; import org.opensearch.plugin.wlm.rest.RestGetQueryGroupAction; import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; import org.opensearch.plugins.ActionPlugin; @@ -29,6 +33,7 @@ import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; +import java.util.Collection; import java.util.List; import java.util.function.Supplier; @@ -46,7 +51,8 @@ public WorkloadManagementPlugin() {} public List> getActions() { return List.of( new ActionPlugin.ActionHandler<>(CreateQueryGroupAction.INSTANCE, TransportCreateQueryGroupAction.class), - new ActionPlugin.ActionHandler<>(GetQueryGroupAction.INSTANCE, TransportGetQueryGroupAction.class) + new ActionPlugin.ActionHandler<>(GetQueryGroupAction.INSTANCE, TransportGetQueryGroupAction.class), + new ActionPlugin.ActionHandler<>(DeleteQueryGroupAction.INSTANCE, TransportDeleteQueryGroupAction.class) ); } @@ -60,11 +66,16 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - return List.of(new RestCreateQueryGroupAction(), new RestGetQueryGroupAction()); + return List.of(new RestCreateQueryGroupAction(), new RestGetQueryGroupAction(), new RestDeleteQueryGroupAction()); } @Override public List> getSettings() { return List.of(QueryGroupPersistenceService.MAX_QUERY_GROUP_COUNT); } + + @Override + public Collection createGuiceModules() { + return List.of(new WorkloadManagementPluginModule()); + } } diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPluginModule.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPluginModule.java new file mode 100644 index 0000000000000..b7c7805639eb2 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/WorkloadManagementPluginModule.java @@ -0,0 +1,31 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm; + +import org.opensearch.common.inject.AbstractModule; +import org.opensearch.common.inject.Singleton; +import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; + +/** + * Guice Module to manage WorkloadManagement related objects + */ +public class WorkloadManagementPluginModule extends AbstractModule { + + /** + * Constructor for WorkloadManagementPluginModule + */ + public WorkloadManagementPluginModule() {} + + @Override + protected void configure() { + // Bind QueryGroupPersistenceService as a singleton to ensure a single instance is used, + // preventing multiple throttling key registrations in the constructor. + bind(QueryGroupPersistenceService.class).in(Singleton.class); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupAction.java new file mode 100644 index 0000000000000..c78952a2f89ad --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupAction.java @@ -0,0 +1,38 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Transport action for delete QueryGroup + * + * @opensearch.experimental + */ +public class DeleteQueryGroupAction extends ActionType<AcknowledgedResponse> { + + /** + * An instance of DeleteQueryGroupAction + */ + public static final DeleteQueryGroupAction INSTANCE = new DeleteQueryGroupAction(); + + /** + * Name for DeleteQueryGroupAction + */ + public static final String NAME = "cluster:admin/opensearch/wlm/query_group/_delete"; + + /** + * Default constructor + */ + private DeleteQueryGroupAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequest.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequest.java new file mode 100644 index 0000000000000..e514943c2c7e9 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequest.java @@ -0,0 +1,65 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.support.master.AcknowledgedRequest; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * Request for delete QueryGroup + * + * @opensearch.experimental + */ +public class DeleteQueryGroupRequest extends AcknowledgedRequest<DeleteQueryGroupRequest> { + private final String name; + + /** + * Default constructor for DeleteQueryGroupRequest + * @param name - name for the QueryGroup to delete + */ + public DeleteQueryGroupRequest(String name) { + this.name = name; + } + + /** + * Constructor for DeleteQueryGroupRequest + * @param in - A {@link StreamInput} object + */ + public DeleteQueryGroupRequest(StreamInput in) throws IOException { + super(in); + name = in.readOptionalString(); + } + + @Override + public ActionRequestValidationException validate() { + if (name == null) { + ActionRequestValidationException actionRequestValidationException = new ActionRequestValidationException(); + actionRequestValidationException.addValidationError("QueryGroup name is missing"); + return actionRequestValidationException; + } + return null; + } + + /** + * Name getter + */ + public String getName() { + return name; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(name); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportCreateQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportCreateQueryGroupAction.java index 01aa8cfb5e610..190ff17261bb4 100644 --- a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportCreateQueryGroupAction.java +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportCreateQueryGroupAction.java @@ -14,7 +14,6 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; import org.opensearch.tasks.Task; -import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; /** @@ -24,7 +23,6 @@ */ public class TransportCreateQueryGroupAction extends HandledTransportAction<CreateQueryGroupRequest, CreateQueryGroupResponse> { - private final ThreadPool threadPool; private final QueryGroupPersistenceService queryGroupPersistenceService; /** @@ -33,7 +31,6 @@ public class TransportCreateQueryGroupAction extends HandledTransportAction listener) { - threadPool.executor(ThreadPool.Names.SAME) .execute(() -> queryGroupPersistenceService.persistInClusterStateMetadata(request.getQueryGroup(), listener)); + queryGroupPersistenceService.persistInClusterStateMetadata(request.getQueryGroup(), listener); } } diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupAction.java new file mode 100644 index 0000000000000..e4d3908d4a208 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupAction.java @@ -0,0 +1,91 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license.
+ */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; + +/** + * Transport action for delete QueryGroup + * + * @opensearch.experimental + */ +public class TransportDeleteQueryGroupAction extends TransportClusterManagerNodeAction { + + private final QueryGroupPersistenceService queryGroupPersistenceService; + + /** + * Constructor for TransportDeleteQueryGroupAction + * + * @param clusterService - a {@link ClusterService} object + * @param transportService - a {@link TransportService} object + * @param actionFilters - a {@link ActionFilters} object + * @param threadPool - a {@link ThreadPool} object + * @param indexNameExpressionResolver - a {@link IndexNameExpressionResolver} object + * @param queryGroupPersistenceService - a {@link QueryGroupPersistenceService} object + */ + @Inject + public TransportDeleteQueryGroupAction( + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + ThreadPool threadPool, + IndexNameExpressionResolver indexNameExpressionResolver, + QueryGroupPersistenceService queryGroupPersistenceService + ) { + super( + DeleteQueryGroupAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + DeleteQueryGroupRequest::new, + indexNameExpressionResolver + ); + this.queryGroupPersistenceService = queryGroupPersistenceService; + } + + @Override + protected void clusterManagerOperation( + DeleteQueryGroupRequest request, + ClusterState state, + ActionListener listener + ) throws Exception { + queryGroupPersistenceService.deleteInClusterStateMetadata(request, listener); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse read(StreamInput in) throws IOException { + return new AcknowledgedResponse(in); + } + + @Override + protected ClusterBlockException checkBlock(DeleteQueryGroupRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupAction.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupAction.java new file mode 100644 index 0000000000000..8ad621cf8a1e4 --- /dev/null +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupAction.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm.rest; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupAction; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupRequest; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.DELETE; + +/** + * Rest action to delete a QueryGroup + * + * @opensearch.experimental + */ +public class RestDeleteQueryGroupAction extends BaseRestHandler { + + /** + * Constructor for RestDeleteQueryGroupAction + */ + public RestDeleteQueryGroupAction() {} + + @Override + public String getName() { + return "delete_query_group"; + } + + /** + * The list of {@link Route}s that this RestHandler is responsible for handling. + */ + @Override + public List routes() { + return List.of(new Route(DELETE, "_wlm/query_group/{name}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + DeleteQueryGroupRequest deleteQueryGroupRequest = new DeleteQueryGroupRequest(request.param("name")); + deleteQueryGroupRequest.clusterManagerNodeTimeout( + request.paramAsTime("cluster_manager_timeout", deleteQueryGroupRequest.clusterManagerNodeTimeout()) + ); + deleteQueryGroupRequest.timeout(request.paramAsTime("timeout", deleteQueryGroupRequest.timeout())); + return channel -> client.execute(DeleteQueryGroupAction.INSTANCE, deleteQueryGroupRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java index fe7080da78bbe..ba5161a2c855e 100644 --- a/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java +++ b/plugins/workload-management/src/main/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceService.java @@ -10,10 +10,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.AckedClusterStateUpdateTask; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateUpdateTask; import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.metadata.QueryGroup; +import org.opensearch.cluster.service.ClusterManagerTaskThrottler; import org.opensearch.cluster.service.ClusterManagerTaskThrottler.ThrottlingKey; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Priority; @@ -24,6 +28,7 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.core.rest.RestStatus; import org.opensearch.plugin.wlm.action.CreateQueryGroupResponse; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupRequest; import org.opensearch.search.ResourceType; import java.util.Collection; @@ -38,6 +43,7 @@ public class QueryGroupPersistenceService { static final String SOURCE = "query-group-persistence-service"; private static final String CREATE_QUERY_GROUP_THROTTLING_KEY = "create-query-group"; + private static final String DELETE_QUERY_GROUP_THROTTLING_KEY = "delete-query-group"; private static final Logger logger = 
LogManager.getLogger(QueryGroupPersistenceService.class); /** * max QueryGroup count setting name @@ -65,6 +71,7 @@ public class QueryGroupPersistenceService { private final ClusterService clusterService; private volatile int maxQueryGroupCount; final ThrottlingKey createQueryGroupThrottlingKey; + final ThrottlingKey deleteQueryGroupThrottlingKey; /** * Constructor for QueryGroupPersistenceService @@ -81,6 +88,7 @@ public QueryGroupPersistenceService( ) { this.clusterService = clusterService; this.createQueryGroupThrottlingKey = clusterService.registerClusterManagerTask(CREATE_QUERY_GROUP_THROTTLING_KEY, true); + this.deleteQueryGroupThrottlingKey = clusterService.registerClusterManagerTask(DELETE_QUERY_GROUP_THROTTLING_KEY, true); setMaxQueryGroupCount(MAX_QUERY_GROUP_COUNT.get(settings)); clusterSettings.addSettingsUpdateConsumer(MAX_QUERY_GROUP_COUNT, this::setMaxQueryGroupCount); } @@ -212,6 +220,50 @@ public static Collection getFromClusterStateMetadata(String name, Cl .collect(Collectors.toList()); } + /** + * Modify cluster state to delete the QueryGroup + * @param deleteQueryGroupRequest - request to delete a QueryGroup + * @param listener - ActionListener for AcknowledgedResponse + */ + public void deleteInClusterStateMetadata( + DeleteQueryGroupRequest deleteQueryGroupRequest, + ActionListener listener + ) { + clusterService.submitStateUpdateTask(SOURCE, new AckedClusterStateUpdateTask<>(deleteQueryGroupRequest, listener) { + @Override + public ClusterState execute(ClusterState currentState) { + return deleteQueryGroupInClusterState(deleteQueryGroupRequest.getName(), currentState); + } + + @Override + public ClusterManagerTaskThrottler.ThrottlingKey getClusterManagerThrottlingKey() { + return deleteQueryGroupThrottlingKey; + } + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + }); + } + + /** + * Modify cluster state to delete the QueryGroup, and return the new cluster state + * @param name - the name for QueryGroup to be deleted + * @param currentClusterState - current cluster state + */ + ClusterState deleteQueryGroupInClusterState(final String name, final ClusterState currentClusterState) { + final Metadata metadata = currentClusterState.metadata(); + final QueryGroup queryGroupToRemove = metadata.queryGroups() + .values() + .stream() + .filter(queryGroup -> queryGroup.getName().equals(name)) + .findAny() + .orElseThrow(() -> new ResourceNotFoundException("No QueryGroup exists with the provided name: " + name)); + + return ClusterState.builder(currentClusterState).metadata(Metadata.builder(metadata).remove(queryGroupToRemove).build()).build(); + } + /** * maxQueryGroupCount getter */ diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/CreateQueryGroupResponseTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/CreateQueryGroupResponseTests.java index 038f015713c5b..ecb9a6b2dc0d2 100644 --- a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/CreateQueryGroupResponseTests.java +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/CreateQueryGroupResponseTests.java @@ -27,7 +27,7 @@ public class CreateQueryGroupResponseTests extends OpenSearchTestCase { /** - * Test case to verify the serialization and deserialization of CreateQueryGroupResponse. + * Test case to verify serialization and deserialization of CreateQueryGroupResponse. 
*/ public void testSerialization() throws IOException { CreateQueryGroupResponse response = new CreateQueryGroupResponse(QueryGroupTestUtils.queryGroupOne, RestStatus.OK); @@ -46,7 +46,7 @@ public void testSerialization() throws IOException { } /** - * Test case to verify the toXContent method of CreateQueryGroupResponse. + * Test case to validate the toXContent method of CreateQueryGroupResponse. */ public void testToXContentCreateQueryGroup() throws IOException { XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequestTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequestTests.java new file mode 100644 index 0000000000000..bc2e4f0faca4c --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/DeleteQueryGroupRequestTests.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.wlm.QueryGroupTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; + +public class DeleteQueryGroupRequestTests extends OpenSearchTestCase { + + /** + * Test case to verify the serialization and deserialization of DeleteQueryGroupRequest. + */ + public void testSerialization() throws IOException { + DeleteQueryGroupRequest request = new DeleteQueryGroupRequest(QueryGroupTestUtils.NAME_ONE); + assertEquals(QueryGroupTestUtils.NAME_ONE, request.getName()); + BytesStreamOutput out = new BytesStreamOutput(); + request.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + DeleteQueryGroupRequest otherRequest = new DeleteQueryGroupRequest(streamInput); + assertEquals(request.getName(), otherRequest.getName()); + } + + /** + * Test case to validate a DeleteQueryGroupRequest. + */ + public void testSerializationWithNull() throws IOException { + DeleteQueryGroupRequest request = new DeleteQueryGroupRequest((String) null); + ActionRequestValidationException actionRequestValidationException = request.validate(); + assertFalse(actionRequestValidationException.getMessage().isEmpty()); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupActionTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupActionTests.java new file mode 100644 index 0000000000000..253d65f8da80f --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/action/TransportDeleteQueryGroupActionTests.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm.action; + +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.core.action.ActionListener; +import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class TransportDeleteQueryGroupActionTests extends OpenSearchTestCase { + + ClusterService clusterService = mock(ClusterService.class); + TransportService transportService = mock(TransportService.class); + ActionFilters actionFilters = mock(ActionFilters.class); + ThreadPool threadPool = mock(ThreadPool.class); + IndexNameExpressionResolver indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); + QueryGroupPersistenceService queryGroupPersistenceService = mock(QueryGroupPersistenceService.class); + + TransportDeleteQueryGroupAction action = new TransportDeleteQueryGroupAction( + clusterService, + transportService, + actionFilters, + threadPool, + indexNameExpressionResolver, + queryGroupPersistenceService + ); + + /** + * Test case to validate the construction for TransportDeleteQueryGroupAction + */ + public void testConstruction() { + assertNotNull(action); + assertEquals(ThreadPool.Names.SAME, action.executor()); + } + + /** + * Test case to validate the clusterManagerOperation function in TransportDeleteQueryGroupAction + */ + public void testClusterManagerOperation() throws Exception { + DeleteQueryGroupRequest request = new DeleteQueryGroupRequest("testGroup"); + @SuppressWarnings("unchecked") + ActionListener listener = mock(ActionListener.class); + ClusterState clusterState = mock(ClusterState.class); + action.clusterManagerOperation(request, clusterState, listener); + verify(queryGroupPersistenceService).deleteInClusterStateMetadata(eq(request), eq(listener)); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupActionTests.java b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupActionTests.java new file mode 100644 index 0000000000000..72191e076bb87 --- /dev/null +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/rest/RestDeleteQueryGroupActionTests.java @@ -0,0 +1,85 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.wlm.rest; + +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.CheckedConsumer; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupAction; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupRequest; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.rest.FakeRestRequest; + +import java.util.List; + +import org.mockito.ArgumentCaptor; + +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_ONE; +import static org.opensearch.rest.RestRequest.Method.DELETE; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + +public class RestDeleteQueryGroupActionTests extends OpenSearchTestCase { + /** + * Test case to validate the construction for RestDeleteQueryGroupAction + */ + public void testConstruction() { + RestDeleteQueryGroupAction action = new RestDeleteQueryGroupAction(); + assertNotNull(action); + assertEquals("delete_query_group", action.getName()); + List routes = action.routes(); + assertEquals(1, routes.size()); + RestHandler.Route route = routes.get(0); + assertEquals(DELETE, route.getMethod()); + assertEquals("_wlm/query_group/{name}", route.getPath()); + } + + /** + * Test case to validate the prepareRequest logic for RestDeleteQueryGroupAction + */ + @SuppressWarnings("unchecked") + public void testPrepareRequest() throws Exception { + RestDeleteQueryGroupAction restDeleteQueryGroupAction = new RestDeleteQueryGroupAction(); + NodeClient nodeClient = mock(NodeClient.class); + RestRequest realRequest = new FakeRestRequest(); + realRequest.params().put("name", NAME_ONE); + ; + RestRequest spyRequest = spy(realRequest); + + doReturn(TimeValue.timeValueSeconds(30)).when(spyRequest).paramAsTime(eq("cluster_manager_timeout"), any(TimeValue.class)); + doReturn(TimeValue.timeValueSeconds(60)).when(spyRequest).paramAsTime(eq("timeout"), any(TimeValue.class)); + + CheckedConsumer consumer = restDeleteQueryGroupAction.prepareRequest(spyRequest, nodeClient); + assertNotNull(consumer); + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(DeleteQueryGroupRequest.class); + ArgumentCaptor> listenerCaptor = ArgumentCaptor.forClass(RestToXContentListener.class); + doNothing().when(nodeClient).execute(eq(DeleteQueryGroupAction.INSTANCE), requestCaptor.capture(), listenerCaptor.capture()); + + consumer.accept(mock(RestChannel.class)); + DeleteQueryGroupRequest capturedRequest = requestCaptor.getValue(); + assertEquals(NAME_ONE, capturedRequest.getName()); + assertEquals(TimeValue.timeValueSeconds(30), capturedRequest.clusterManagerNodeTimeout()); + assertEquals(TimeValue.timeValueSeconds(60), capturedRequest.timeout()); + verify(nodeClient).execute( + eq(DeleteQueryGroupAction.INSTANCE), + any(DeleteQueryGroupRequest.class), + any(RestToXContentListener.class) + ); + } +} diff --git a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java 
b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java index 2aa3b9e168852..a516ffdde839e 100644 --- a/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java +++ b/plugins/workload-management/src/test/java/org/opensearch/plugin/wlm/service/QueryGroupPersistenceServiceTests.java @@ -8,6 +8,9 @@ package org.opensearch.plugin.wlm.service; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.AckedClusterStateUpdateTask; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.ClusterStateUpdateTask; @@ -20,6 +23,7 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.plugin.wlm.QueryGroupTestUtils; import org.opensearch.plugin.wlm.action.CreateQueryGroupResponse; +import org.opensearch.plugin.wlm.action.DeleteQueryGroupRequest; import org.opensearch.search.ResourceType; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; @@ -39,6 +43,7 @@ import static org.opensearch.plugin.wlm.QueryGroupTestUtils.MONITOR_STRING; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_NONE_EXISTED; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_ONE; +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.NAME_TWO; import static org.opensearch.plugin.wlm.QueryGroupTestUtils._ID_ONE; import static org.opensearch.plugin.wlm.QueryGroupTestUtils._ID_TWO; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.assertEqualQueryGroups; @@ -48,12 +53,14 @@ import static org.opensearch.plugin.wlm.QueryGroupTestUtils.preparePersistenceServiceSetup; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupList; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupOne; +import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupPersistenceService; import static org.opensearch.plugin.wlm.QueryGroupTestUtils.queryGroupTwo; import static org.opensearch.plugin.wlm.service.QueryGroupPersistenceService.QUERY_GROUP_COUNT_SETTING_NAME; import static org.opensearch.plugin.wlm.service.QueryGroupPersistenceService.SOURCE; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.argThat; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -298,4 +305,55 @@ public void testMaxQueryGroupCount() { queryGroupPersistenceService.setMaxQueryGroupCount(50); assertEquals(50, queryGroupPersistenceService.getMaxQueryGroupCount()); } + + /** + * Tests delete a single QueryGroup + */ + public void testDeleteSingleQueryGroup() { + ClusterState newClusterState = queryGroupPersistenceService().deleteQueryGroupInClusterState(NAME_TWO, clusterState()); + Map afterDeletionGroups = newClusterState.getMetadata().queryGroups(); + assertFalse(afterDeletionGroups.containsKey(_ID_TWO)); + assertEquals(1, afterDeletionGroups.size()); + List oldQueryGroups = new ArrayList<>(); + oldQueryGroups.add(queryGroupOne); + assertEqualQueryGroups(new ArrayList<>(afterDeletionGroups.values()), oldQueryGroups); + } + + /** + * Tests delete a QueryGroup with invalid name + */ + public void testDeleteNonExistedQueryGroup() { + assertThrows( + ResourceNotFoundException.class, + 
() -> queryGroupPersistenceService().deleteQueryGroupInClusterState(NAME_NONE_EXISTED, clusterState()) + ); + } + + /** + * Tests DeleteInClusterStateMetadata function + */ + @SuppressWarnings("unchecked") + public void testDeleteInClusterStateMetadata() throws Exception { + DeleteQueryGroupRequest request = new DeleteQueryGroupRequest(NAME_ONE); + ClusterService clusterService = mock(ClusterService.class); + + ActionListener listener = mock(ActionListener.class); + QueryGroupPersistenceService queryGroupPersistenceService = new QueryGroupPersistenceService( + clusterService, + QueryGroupTestUtils.settings(), + clusterSettings() + ); + doAnswer(invocation -> { + AckedClusterStateUpdateTask task = invocation.getArgument(1); + ClusterState initialState = clusterState(); + ClusterState newState = task.execute(initialState); + assertNotNull(newState); + assertEquals(queryGroupPersistenceService.deleteQueryGroupThrottlingKey, task.getClusterManagerThrottlingKey()); + task.onAllNodesAcked(null); + verify(listener).onResponse(argThat(response -> response.isAcknowledged())); + return null; + }).when(clusterService).submitStateUpdateTask(anyString(), any()); + queryGroupPersistenceService.deleteInClusterStateMetadata(request, listener); + verify(clusterService).submitStateUpdateTask(eq(SOURCE), any(AckedClusterStateUpdateTask.class)); + } } diff --git a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/delete_query_group_context.json b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/delete_query_group_context.json new file mode 100644 index 0000000000000..16930427fc2fe --- /dev/null +++ b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/api/delete_query_group_context.json @@ -0,0 +1,22 @@ +{ + "delete_query_group_context": { + "stability": "experimental", + "url": { + "paths": [ + { + "path": "/_wlm/query_group/{name}", + "methods": [ + "DELETE" + ], + "parts": { + "name": { + "type": "string", + "description": "QueryGroup name" + } + } + } + ] + }, + "params":{} + } +} diff --git a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml index a22dfa2f4477e..a00314986a5cf 100644 --- a/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml +++ b/plugins/workload-management/src/yamlRestTest/resources/rest-api-spec/test/wlm/10_query_group.yml @@ -106,3 +106,9 @@ - match: { query_groups.0.resiliency_mode: "monitor" } - match: { query_groups.0.resource_limits.cpu: 0.35 } - match: { query_groups.0.resource_limits.memory: 0.25 } + + - do: + delete_query_group_context: + name: "analytics2" + + - match: { acknowledged: true } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java index 6f5b4bba481dd..c461f83657340 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/StarTreeMapperIT.java @@ -265,13 +265,9 @@ public void testValidCompositeIndex() { assertEquals(expectedTimeUnits, dateDim.getIntervals()); assertEquals("numeric_dv", starTreeFieldType.getDimensions().get(1).getField()); assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField()); - List expectedMetrics = Arrays.asList( - MetricStat.AVG, - 
MetricStat.VALUE_COUNT, - MetricStat.SUM, - MetricStat.MAX, - MetricStat.MIN - ); + + // Assert default metrics + List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); assertEquals( @@ -349,13 +345,9 @@ public void testUpdateIndexWhenMappingIsSame() { assertEquals(expectedTimeUnits, dateDim.getIntervals()); assertEquals("numeric_dv", starTreeFieldType.getDimensions().get(1).getField()); assertEquals("numeric_dv", starTreeFieldType.getMetrics().get(0).getField()); - List expectedMetrics = Arrays.asList( - MetricStat.AVG, - MetricStat.VALUE_COUNT, - MetricStat.SUM, - MetricStat.MAX, - MetricStat.MIN - ); + + // Assert default metrics + List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); assertEquals( diff --git a/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java new file mode 100644 index 0000000000000..67ee45f4c9306 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.apache.lucene.codecs.lucene90; + +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.index.SegmentWriteState; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This class is an abstraction of the {@link DocValuesConsumer} for the Star Tree index structure. + * It is responsible to consume various types of document values (numeric, binary, sorted, sorted numeric, + * and sorted set) for fields in the Star Tree index. + * + * @opensearch.experimental + */ +public class Lucene90DocValuesConsumerWrapper implements Closeable { + + private final Lucene90DocValuesConsumer lucene90DocValuesConsumer; + + public Lucene90DocValuesConsumerWrapper( + SegmentWriteState state, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { + lucene90DocValuesConsumer = new Lucene90DocValuesConsumer(state, dataCodec, dataExtension, metaCodec, metaExtension); + } + + public Lucene90DocValuesConsumer getLucene90DocValuesConsumer() { + return lucene90DocValuesConsumer; + } + + @Override + public void close() throws IOException { + lucene90DocValuesConsumer.close(); + } +} diff --git a/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesProducerWrapper.java b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesProducerWrapper.java new file mode 100644 index 0000000000000..a213852c59094 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesProducerWrapper.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.apache.lucene.codecs.lucene90; + +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.SegmentReadState; + +import java.io.Closeable; +import java.io.IOException; + +/** + * This class is a custom abstraction of the {@link DocValuesProducer} for the Star Tree index structure. + * It is responsible for providing access to various types of document values (numeric, binary, sorted, sorted numeric, + * and sorted set) for fields in the Star Tree index. + * + * @opensearch.experimental + */ +public class Lucene90DocValuesProducerWrapper implements Closeable { + + private final Lucene90DocValuesProducer lucene90DocValuesProducer; + + public Lucene90DocValuesProducerWrapper( + SegmentReadState state, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { + lucene90DocValuesProducer = new Lucene90DocValuesProducer(state, dataCodec, dataExtension, metaCodec, metaExtension); + } + + public DocValuesProducer getLucene90DocValuesProducer() { + return lucene90DocValuesProducer; + } + + @Override + public void close() throws IOException { + lucene90DocValuesProducer.close(); + } +} diff --git a/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java b/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java new file mode 100644 index 0000000000000..f7759fcced284 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/index/SortedNumericDocValuesWriterWrapper.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.apache.lucene.index; + +import org.apache.lucene.util.Counter; + +/** + * A wrapper class for writing sorted numeric doc values. + *
<p>
+ * This class provides a convenient way to add sorted numeric doc values to a field + * and retrieve the corresponding {@link SortedNumericDocValues} instance. + * + * @opensearch.experimental + */ +public class SortedNumericDocValuesWriterWrapper { + + private final SortedNumericDocValuesWriter sortedNumericDocValuesWriter; + + /** + * Sole constructor. Constructs a new {@link SortedNumericDocValuesWriterWrapper} instance. + * + * @param fieldInfo the field information for the field being written + * @param counter a counter for tracking memory usage + */ + public SortedNumericDocValuesWriterWrapper(FieldInfo fieldInfo, Counter counter) { + sortedNumericDocValuesWriter = new SortedNumericDocValuesWriter(fieldInfo, counter); + } + + /** + * Adds a value to the sorted numeric doc values for the specified document. + * + * @param docID the document ID + * @param value the value to add + */ + public void addValue(int docID, long value) { + sortedNumericDocValuesWriter.addValue(docID, value); + } + + /** + * Returns the {@link SortedNumericDocValues} instance containing the sorted numeric doc values + * + * @return the {@link SortedNumericDocValues} instance + */ + public SortedNumericDocValues getDocValues() { + return sortedNumericDocValuesWriter.getDocValues(); + } +} diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java index f77a7ffc8ce8e..1fba63616aa96 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java @@ -458,7 +458,7 @@ public static void ensureNodesCompatibility( ); } - ensureRemoteStoreNodesCompatibility(joiningNode, currentNodes, metadata); + ensureRemoteRepositoryCompatibility(joiningNode, currentNodes, metadata); } /** @@ -491,6 +491,33 @@ public static void ensureNodeCommissioned(DiscoveryNode node, Metadata metadata) } } + public static void ensureRemoteRepositoryCompatibility(DiscoveryNode joiningNode, DiscoveryNodes currentNodes, Metadata metadata) { + List existingNodes = new ArrayList<>(currentNodes.getNodes().values()); + + boolean isClusterRemoteStoreEnabled = existingNodes.stream().anyMatch(DiscoveryNode::isRemoteStoreNode); + if (isClusterRemoteStoreEnabled || joiningNode.isRemoteStoreNode()) { + ensureRemoteStoreNodesCompatibility(joiningNode, currentNodes, metadata); + } else { + ensureRemoteClusterStateNodesCompatibility(joiningNode, currentNodes); + } + } + + private static void ensureRemoteClusterStateNodesCompatibility(DiscoveryNode joiningNode, DiscoveryNodes currentNodes) { + List existingNodes = new ArrayList<>(currentNodes.getNodes().values()); + + assert existingNodes.isEmpty() == false; + Optional remotePublicationNode = existingNodes.stream() + .filter(DiscoveryNode::isRemoteStatePublicationEnabled) + .findFirst(); + + if (remotePublicationNode.isPresent() && joiningNode.isRemoteStatePublicationEnabled()) { + List reposToValidate = new ArrayList<>(2); + reposToValidate.add(RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY); + reposToValidate.add(RemoteStoreNodeAttribute.REMOTE_STORE_ROUTING_TABLE_REPOSITORY_NAME_ATTRIBUTE_KEY); + ensureRepositoryCompatibility(joiningNode, remotePublicationNode.get(), reposToValidate); + } + } + /** * The method ensures homogeneity - * 1. The joining node has to be a remote store backed if it's joining a remote store backed cluster. 
Validates @@ -506,6 +533,7 @@ public static void ensureNodeCommissioned(DiscoveryNode node, Metadata metadata) * needs to be modified. */ private static void ensureRemoteStoreNodesCompatibility(DiscoveryNode joiningNode, DiscoveryNodes currentNodes, Metadata metadata) { + List existingNodes = new ArrayList<>(currentNodes.getNodes().values()); assert existingNodes.isEmpty() == false; @@ -587,6 +615,23 @@ private static void ensureRemoteStoreNodesCompatibility( } } + private static void ensureRepositoryCompatibility(DiscoveryNode joiningNode, DiscoveryNode existingNode, List reposToValidate) { + + RemoteStoreNodeAttribute joiningRemoteStoreNodeAttribute = new RemoteStoreNodeAttribute(joiningNode); + RemoteStoreNodeAttribute existingRemoteStoreNodeAttribute = new RemoteStoreNodeAttribute(existingNode); + + if (existingRemoteStoreNodeAttribute.equalsForRepositories(joiningRemoteStoreNodeAttribute, reposToValidate) == false) { + throw new IllegalStateException( + "a remote store node [" + + joiningNode + + "] is trying to join a remote store cluster with incompatible node attributes in " + + "comparison with existing node [" + + existingNode + + "]" + ); + } + } + public static Collection> addBuiltInJoinValidators( Collection> onJoinValidators ) { diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java index 4da6c68b40733..6163fd624c838 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java @@ -1397,7 +1397,14 @@ public Builder put(final QueryGroup queryGroup) { return queryGroups(existing); } - public Map getQueryGroups() { + public Builder remove(final QueryGroup queryGroup) { + Objects.requireNonNull(queryGroup, "queryGroup should not be null"); + Map existing = new HashMap<>(getQueryGroups()); + existing.remove(queryGroup.get_id()); + return queryGroups(existing); + } + + private Map getQueryGroups() { return Optional.ofNullable(this.customs.get(QueryGroupMetadata.TYPE)) .map(o -> (QueryGroupMetadata) o) .map(QueryGroupMetadata::queryGroups) @@ -1830,9 +1837,7 @@ static void validateDataStreams(SortedMap indicesLooku if (dsMetadata != null) { for (DataStream ds : dsMetadata.dataStreams().values()) { String prefix = DataStream.BACKING_INDEX_PREFIX + ds.getName() + "-"; - Set conflicts = indicesLookup.subMap(prefix, DataStream.BACKING_INDEX_PREFIX + ds.getName() + ".") // '.' 
is the - // char after - // '-' + Set conflicts = indicesLookup.subMap(prefix, DataStream.BACKING_INDEX_PREFIX + ds.getName() + ".") .keySet() .stream() .filter(s -> NUMBER_PATTERN.matcher(s.substring(prefix.length())).matches()) diff --git a/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java index 4b3dc7964a87b..59452e33191d7 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/RepositoriesMetadata.java @@ -184,6 +184,24 @@ public boolean equalsIgnoreGenerationsWithRepoSkip(@Nullable RepositoriesMetadat .filter(repo -> !reposToSkip.contains(repo.name())) .collect(Collectors.toList()); + return equalsRepository(currentRepositories, otherRepositories); + } + + public boolean equalsIgnoreGenerationsForRepo(@Nullable RepositoriesMetadata other, List reposToValidate) { + if (other == null) { + return false; + } + List currentRepositories = repositories.stream() + .filter(repo -> reposToValidate.contains(repo.name())) + .collect(Collectors.toList()); + List otherRepositories = other.repositories.stream() + .filter(repo -> reposToValidate.contains(repo.name())) + .collect(Collectors.toList()); + + return equalsRepository(currentRepositories, otherRepositories); + } + + public static boolean equalsRepository(List currentRepositories, List otherRepositories) { if (otherRepositories.size() != currentRepositories.size()) { return false; } diff --git a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java index 653f81830ed17..a0f97e3d4c513 100644 --- a/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/opensearch/cluster/node/DiscoveryNode.java @@ -64,6 +64,7 @@ import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_NODE_ATTRIBUTE_KEY_PREFIX; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_ROUTING_TABLE_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; /** * A discovery node represents a node that is part of the cluster. 
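The hunk below tightens DiscoveryNode#isRemoteStoreNode(): rather than treating any attribute key carrying the remote-store prefix as proof of a store-backed node, it now requires both the cluster-state and the segment repository attributes, so a node with only remote publication enabled is no longer misclassified. A minimal, self-contained sketch of the before/after predicate (the attribute key strings here are illustrative stand-ins, not the actual RemoteStoreNodeAttribute constants):

import java.util.Map;
import java.util.Set;

final class RemoteStoreNodeCheckSketch {
    // Hypothetical keys; the real constants live in RemoteStoreNodeAttribute.
    static final String CLUSTER_STATE_REPO_ATTR = "remote_store.state.repository";
    static final String SEGMENT_REPO_ATTR = "remote_store.segment.repository";

    // Old check: any attribute starting with the remote-store prefix qualified.
    static boolean isRemoteStoreNodeOld(Map<String, String> attrs) {
        return attrs.keySet().stream().anyMatch(k -> k.startsWith("remote_store"));
    }

    // New check: both the cluster-state and the segment repository attributes
    // must be present for the node to count as remote-store backed.
    static boolean isRemoteStoreNodeNew(Map<String, String> attrs) {
        Set<String> keys = attrs.keySet();
        return keys.contains(CLUSTER_STATE_REPO_ATTR) && keys.contains(SEGMENT_REPO_ATTR);
    }

    public static void main(String[] args) {
        // A publication-only node: cluster-state and routing-table repos, no segment repo.
        Map<String, String> publicationOnly = Map.of(
            CLUSTER_STATE_REPO_ATTR, "repo-a",
            "remote_store.routing_table.repository", "repo-b"
        );
        System.out.println(isRemoteStoreNodeOld(publicationOnly)); // true, misclassified
        System.out.println(isRemoteStoreNodeNew(publicationOnly)); // false, as intended
    }
}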
@@ -473,7 +474,8 @@ public boolean isSearchNode() { * @return true if the node contains remote store node attributes, false otherwise */ public boolean isRemoteStoreNode() { - return this.getAttributes().keySet().stream().anyMatch(key -> key.startsWith(REMOTE_STORE_NODE_ATTRIBUTE_KEY_PREFIX)); + return this.getAttributes().keySet().stream().anyMatch(key -> key.equals(REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY)) + && this.getAttributes().keySet().stream().anyMatch(key -> key.equals(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY)); } /** diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index d5e8e90458390..49ef87838ed2e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -173,6 +173,7 @@ import org.opensearch.transport.SniffConnectionStrategy; import org.opensearch.transport.TransportSettings; import org.opensearch.watcher.ResourceWatcherService; +import org.opensearch.wlm.WorkloadManagementSettings; import java.util.Arrays; import java.util.Collections; @@ -766,7 +767,13 @@ public void apply(Settings value, Settings current, Settings previous) { // Composite index settings CompositeIndexSettings.STAR_TREE_INDEX_ENABLED_SETTING, - SystemTemplatesService.SETTING_APPLICATION_BASED_CONFIGURATION_TEMPLATES_ENABLED + SystemTemplatesService.SETTING_APPLICATION_BASED_CONFIGURATION_TEMPLATES_ENABLED, + + // WorkloadManagement settings + WorkloadManagementSettings.NODE_LEVEL_CPU_REJECTION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, + WorkloadManagementSettings.NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD ) ) ); diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index a4d60bc76127c..284eb43aa5509 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -238,6 +238,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { // Settings for concurrent segment search IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING, + IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_MAX_SLICE_COUNT, IndexSettings.ALLOW_DERIVED_FIELDS, // Settings for star tree index diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index a833d66fab5d9..9cab68d646b6e 100644 --- a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -76,6 +76,7 @@ import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING; import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectory.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY_MINIMUM_VERSION; +import static org.opensearch.search.SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE; /** * This class encapsulates all index level settings and handles settings updates. 
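The next hunk introduces an index-scoped index.search.concurrent.max_slice_count setting; DefaultSearchContext (changed later in this patch) reads the index-level value first and falls back to the cluster-wide CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING. A small sketch of that resolution order, with plain maps standing in for the OpenSearch Settings API:

import java.util.Map;
import java.util.Optional;

final class SliceCountResolutionSketch {
    // Setting key taken from the hunk below; lookup and parsing are simplified.
    static final String INDEX_KEY = "index.search.concurrent.max_slice_count";

    static int resolveTargetMaxSliceCount(Map<String, String> indexSettings, int clusterDefault) {
        return Optional.ofNullable(indexSettings.get(INDEX_KEY))
            .map(Integer::parseInt)
            .orElse(clusterDefault);
    }

    public static void main(String[] args) {
        System.out.println(resolveTargetMaxSliceCount(Map.of(INDEX_KEY, "4"), 2)); // 4, index override wins
        System.out.println(resolveTargetMaxSliceCount(Map.of(), 2));               // 2, cluster fallback
    }
}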
@@ -691,6 +692,14 @@ public static IndexMergePolicy fromString(String text) { Property.Dynamic ); + public static final Setting INDEX_CONCURRENT_SEGMENT_SEARCH_MAX_SLICE_COUNT = Setting.intSetting( + "index.search.concurrent.max_slice_count", + CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE, + CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_DEFAULT_VALUE, + Property.Dynamic, + Property.IndexScope + ); + public static final Setting INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING = Setting.boolSetting( "index.optimize_doc_id_lookup.fuzzy_set.enabled", false, diff --git a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java index 3acedc6a27d7f..99691d7061ac9 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java @@ -12,6 +12,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; import org.opensearch.index.mapper.MapperService; import java.util.HashMap; @@ -29,6 +30,10 @@ */ @ExperimentalApi public class CompositeCodecFactory { + + // we can use this to track the latest composite codec + public static final String COMPOSITE_CODEC = Composite99Codec.COMPOSITE_INDEX_CODEC_NAME; + public CompositeCodecFactory() {} public Map getCompositeIndexCodecs(MapperService mapperService, Logger logger) { diff --git a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java new file mode 100644 index 0000000000000..1ed672870337e --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java @@ -0,0 +1,50 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesConsumerWrapper; +import org.apache.lucene.index.SegmentWriteState; + +import java.io.IOException; + +/** + * A factory class that provides a factory method for creating {@link DocValuesConsumer} instances + * for the latest composite codec. + *
<p>
+ * The segments are written using the latest composite codec. The codec + * internally manages calling the appropriate consumer factory for its abstractions. + *
<p>
+ * This design ensures forward compatibility for writing operations + * + * @opensearch.experimental + */ +public class LuceneDocValuesConsumerFactory { + + public static DocValuesConsumer getDocValuesConsumerForCompositeCodec( + SegmentWriteState state, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { + try ( + Lucene90DocValuesConsumerWrapper lucene90DocValuesConsumerWrapper = new Lucene90DocValuesConsumerWrapper( + state, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ) + ) { + return lucene90DocValuesConsumerWrapper.getLucene90DocValuesConsumer(); + } + } + +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java new file mode 100644 index 0000000000000..611a97ffeb834 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactory.java @@ -0,0 +1,60 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesProducerWrapper; +import org.apache.lucene.index.SegmentReadState; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; + +import java.io.IOException; + +/** + * A factory class that provides a factory method for creating {@link DocValuesProducer} instances + * based on the specified composite codec. + *
<p>
+ * In producers, we want to ensure compatibility with older codec versions during the segment reads. + * This approach allows for writing with only the latest codec while maintaining + * the ability to read data encoded with any codec version present in the segment. + *
<p>
+ * This design ensures backward compatibility for reads across different codec versions. + * + * @opensearch.experimental + */ +public class LuceneDocValuesProducerFactory { + + public static DocValuesProducer getDocValuesProducerForCompositeCodec( + String compositeCodec, + SegmentReadState state, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { + + switch (compositeCodec) { + case Composite99Codec.COMPOSITE_INDEX_CODEC_NAME: + try ( + Lucene90DocValuesProducerWrapper lucene90DocValuesProducerWrapper = new Lucene90DocValuesProducerWrapper( + state, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ) + ) { + return lucene90DocValuesProducerWrapper.getLucene90DocValuesProducer(); + } + default: + throw new IllegalStateException("Invalid composite codec " + "[" + compositeCodec + "]"); + } + + } + +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/Composite99Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java similarity index 97% rename from server/src/main/java/org/opensearch/index/codec/composite/Composite99Codec.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java index de04944e67cd2..8422932e937c2 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/Composite99Codec.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99Codec.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.index.codec.composite; +package org.opensearch.index.codec.composite.composite99; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesFormat.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java similarity index 97% rename from server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesFormat.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java index 216ed4f68f333..e8c69b11b7c88 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesFormat.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesFormat.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.index.codec.composite; +package org.opensearch.index.codec.composite.composite99; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesFormat; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesReader.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java similarity index 91% rename from server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesReader.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java index df5008a7f294e..e3bfe01cfa2d5 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesReader.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesReader.java @@ -6,7 +6,7 @@ * compatible open source license. 
*/ -package org.opensearch.index.codec.composite; +package org.opensearch.index.codec.composite.composite99; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.index.BinaryDocValues; @@ -17,6 +17,9 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; +import org.opensearch.index.codec.composite.CompositeIndexReader; +import org.opensearch.index.codec.composite.CompositeIndexValues; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java similarity index 97% rename from server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesWriter.java rename to server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java index 6ed1a8c42e380..da784e1232800 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/Composite99DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite99/Composite99DocValuesWriter.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.index.codec.composite; +package org.opensearch.index.codec.composite.composite99; import org.apache.lucene.codecs.DocValuesConsumer; import org.apache.lucene.codecs.DocValuesProducer; @@ -18,6 +18,9 @@ import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.SortedNumericDocValues; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.composite.CompositeIndexFieldInfo; +import org.opensearch.index.codec.composite.CompositeIndexReader; +import org.opensearch.index.codec.composite.CompositeIndexValues; import org.opensearch.index.codec.composite.datacube.startree.StarTreeValues; import org.opensearch.index.compositeindex.datacube.startree.StarTreeField; import org.opensearch.index.compositeindex.datacube.startree.builder.StarTreesBuilder; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java b/server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java new file mode 100644 index 0000000000000..3d6f130b9a7c8 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite99/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** + * Responsible for handling all composite index codecs and operations associated with Composite99 codec + */ +package org.opensearch.index.codec.composite.composite99; diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java index df3b2229d2c5b..84eaaeb637962 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/MetricStat.java @@ -10,6 +10,9 @@ import org.opensearch.common.annotation.ExperimentalApi; +import java.util.Arrays; +import java.util.List; + /** * Supported metric types for composite index * @@ -18,21 +21,39 @@ @ExperimentalApi public enum MetricStat { VALUE_COUNT("value_count"), - AVG("avg"), SUM("sum"), MIN("min"), - MAX("max"); + MAX("max"), + AVG("avg", VALUE_COUNT, SUM); private final String typeName; + private final MetricStat[] baseMetrics; - MetricStat(String typeName) { + MetricStat(String typeName, MetricStat... baseMetrics) { this.typeName = typeName; + this.baseMetrics = baseMetrics; } public String getTypeName() { return typeName; } + /** + * Return the list of metrics that this metric is derived from + * For example, AVG is derived from COUNT and SUM + */ + public List getBaseMetrics() { + return Arrays.asList(baseMetrics); + } + + /** + * Return true if this metric is derived from other metrics + * For example, AVG is derived from COUNT and SUM + */ + public boolean isDerivedMetric() { + return baseMetrics != null && baseMetrics.length > 0; + } + public static MetricStat fromTypeName(String typeName) { for (MetricStat metric : MetricStat.values()) { if (metric.getTypeName().equalsIgnoreCase(typeName)) { diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java index 6535f8ed11da3..ce389a99b3626 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/StarTreeIndexSettings.java @@ -15,6 +15,7 @@ import java.util.Arrays; import java.util.List; +import java.util.function.Function; /** * Index settings for star tree fields. 
The settings are final as right now @@ -93,16 +94,10 @@ public class StarTreeIndexSettings { /** * Default metrics for metrics as part of star tree fields */ - public static final Setting> DEFAULT_METRICS_LIST = Setting.listSetting( + public static final Setting> DEFAULT_METRICS_LIST = Setting.listSetting( "index.composite_index.star_tree.field.default.metrics", - Arrays.asList( - MetricStat.AVG.toString(), - MetricStat.VALUE_COUNT.toString(), - MetricStat.SUM.toString(), - MetricStat.MAX.toString(), - MetricStat.MIN.toString() - ), - MetricStat::fromTypeName, + Arrays.asList(MetricStat.VALUE_COUNT.toString(), MetricStat.SUM.toString()), + Function.identity(), Setting.Property.IndexScope, Setting.Property.Final ); diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java index 90b2d0727d572..3fc8d24e6e0d2 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/builder/BaseStarTreeBuilder.java @@ -118,6 +118,9 @@ public List generateMetricAggregatorInfos(MapperService ma List metricAggregatorInfos = new ArrayList<>(); for (Metric metric : this.starTreeField.getMetrics()) { for (MetricStat metricStat : metric.getMetrics()) { + if (metricStat.isDerivedMetric()) { + continue; + } IndexNumericFieldData.NumericType numericType; Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper(metric.getField()); if (fieldMapper instanceof NumberFieldMapper) { diff --git a/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java b/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java index d9539f9dc0c82..93764e93ae30d 100644 --- a/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/StarTreeMapper.java @@ -28,6 +28,7 @@ import java.util.Locale; import java.util.Map; import java.util.Optional; +import java.util.Queue; import java.util.Set; import java.util.stream.Collectors; @@ -262,17 +263,50 @@ private Metric getMetric(String name, Map metric, Mapper.TypePar .collect(Collectors.toList()); metric.remove(STATS); if (metricStrings.isEmpty()) { - metricTypes = new ArrayList<>(StarTreeIndexSettings.DEFAULT_METRICS_LIST.get(context.getSettings())); - } else { - Set metricSet = new LinkedHashSet<>(); - for (String metricString : metricStrings) { - metricSet.add(MetricStat.fromTypeName(metricString)); - } - metricTypes = new ArrayList<>(metricSet); + metricStrings = new ArrayList<>(StarTreeIndexSettings.DEFAULT_METRICS_LIST.get(context.getSettings())); + } + // Add all required metrics initially + Set metricSet = new LinkedHashSet<>(); + for (String metricString : metricStrings) { + MetricStat metricStat = MetricStat.fromTypeName(metricString); + metricSet.add(metricStat); + addBaseMetrics(metricStat, metricSet); } + addEligibleDerivedMetrics(metricSet); + metricTypes = new ArrayList<>(metricSet); return new Metric(name, metricTypes); } + /** + * Add base metrics of derived metric to metric set + */ + private void addBaseMetrics(MetricStat metricStat, Set metricSet) { + if (metricStat.isDerivedMetric()) { + Queue metricQueue = new LinkedList<>(metricStat.getBaseMetrics()); + while (metricQueue.isEmpty() == false) { + MetricStat metric = metricQueue.poll(); + if (metric.isDerivedMetric() 
&& !metricSet.contains(metric)) { + metricQueue.addAll(metric.getBaseMetrics()); + } + metricSet.add(metric); + } + } + } + + /** + * Add derived metrics if all associated base metrics are present + */ + private void addEligibleDerivedMetrics(Set metricStats) { + for (MetricStat metric : MetricStat.values()) { + if (metric.isDerivedMetric() && !metricStats.contains(metric)) { + List sourceMetrics = metric.getBaseMetrics(); + if (metricStats.containsAll(sourceMetrics)) { + metricStats.add(metric); + } + } + } + } + @Override protected List> getParameters() { return List.of(config); diff --git a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java index 21ff90f2bf534..4b92d6a1f5460 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java @@ -251,7 +251,7 @@ public TermsQueryBuilder(StreamInput in) throws IOException { termsLookup = in.readOptionalWriteable(TermsLookup::new); values = (List) in.readGenericValue(); this.supplier = null; - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_17_0)) { valueType = in.readEnum(ValueType.class); } } @@ -264,7 +264,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeOptionalWriteable(termsLookup); out.writeGenericValue(values); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_17_0)) { out.writeEnum(valueType); } } diff --git a/server/src/main/java/org/opensearch/index/store/RemoteStoreFileDownloader.java b/server/src/main/java/org/opensearch/index/store/RemoteStoreFileDownloader.java index 134994c0ae32c..ad42b6d677b41 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteStoreFileDownloader.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteStoreFileDownloader.java @@ -149,6 +149,7 @@ private void copyOneFile( try { cancellableThreads.executeIO(() -> { destination.copyFrom(source, file, file, IOContext.DEFAULT); + logger.trace("Downloaded file {} of size {}", file, destination.fileLength(file)); onFileCompletion.run(); if (secondDestination != null) { secondDestination.copyFrom(destination, file, file, IOContext.DEFAULT); diff --git a/server/src/main/java/org/opensearch/indices/TermsLookup.java b/server/src/main/java/org/opensearch/indices/TermsLookup.java index 6b83b713c05c2..7337ed31ce41e 100644 --- a/server/src/main/java/org/opensearch/indices/TermsLookup.java +++ b/server/src/main/java/org/opensearch/indices/TermsLookup.java @@ -87,7 +87,7 @@ public TermsLookup(StreamInput in) throws IOException { path = in.readString(); index = in.readString(); routing = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (in.getVersion().onOrAfter(Version.V_2_17_0)) { store = in.readBoolean(); } } @@ -101,7 +101,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(path); out.writeString(index); out.writeOptionalString(routing); - if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (out.getVersion().onOrAfter(Version.V_2_17_0)) { out.writeBoolean(store); } } diff --git a/server/src/main/java/org/opensearch/node/remotestore/RemoteStoreNodeAttribute.java b/server/src/main/java/org/opensearch/node/remotestore/RemoteStoreNodeAttribute.java index a0f745a4270c4..2a3a2dc554ebf 100644 --- 
a/server/src/main/java/org/opensearch/node/remotestore/RemoteStoreNodeAttribute.java +++ b/server/src/main/java/org/opensearch/node/remotestore/RemoteStoreNodeAttribute.java @@ -264,6 +264,14 @@ public boolean equalsWithRepoSkip(Object o, List reposToSkip) { return this.getRepositoriesMetadata().equalsIgnoreGenerationsWithRepoSkip(that.getRepositoriesMetadata(), reposToSkip); } + public boolean equalsForRepositories(Object otherNode, List repositoryToValidate) { + if (this == otherNode) return true; + if (otherNode == null || getClass() != otherNode.getClass()) return false; + + RemoteStoreNodeAttribute other = (RemoteStoreNodeAttribute) otherNode; + return this.getRepositoriesMetadata().equalsIgnoreGenerationsForRepo(other.repositoriesMetadata, repositoryToValidate); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index abb968c2de245..1401c6b89d38a 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -991,7 +991,14 @@ public int getTargetMaxSliceCount() { if (shouldUseConcurrentSearch() == false) { throw new IllegalStateException("Target slice count should not be used when concurrent search is disabled"); } - return clusterService.getClusterSettings().get(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING); + + return indexService.getIndexSettings() + .getSettings() + .getAsInt( + IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_MAX_SLICE_COUNT.getKey(), + clusterService.getClusterSettings().get(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING) + ); + } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java index f8061bcaca50f..20f294bc7199b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -57,7 +57,11 @@ import org.opensearch.search.DocValueFormat; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Objects; import java.util.Set; import java.util.SortedSet; @@ -86,6 +90,10 @@ public class IncludeExclude implements Writeable, ToXContentFragment { * https://github.com/opensearch-project/OpenSearch/issues/2858 */ private static final int DEFAULT_MAX_REGEX_LENGTH = 1000; + /** + * The maximum number of prefixes to extract from a regex in tryCreatePrefixOrdinalsFilter + */ + private static final int MAX_PREFIXES = 1000; // for parsing purposes only // TODO: move all aggs to the same package so that this stuff could be pkg-private @@ -393,6 +401,92 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro } + /** + * An ordinals filter that includes/excludes all ordinals corresponding to terms starting with the given prefixes + */ + static class PrefixBackedOrdinalsFilter extends OrdinalsFilter { + + private final SortedSet includePrefixes, excludePrefixes; + + private PrefixBackedOrdinalsFilter(SortedSet includePrefixes, SortedSet excludePrefixes) { + this.includePrefixes = includePrefixes; + this.excludePrefixes = 
excludePrefixes; + } + + private static BytesRef nextBytesRef(BytesRef bytesRef) { + BytesRef next = BytesRef.deepCopyOf(bytesRef); + int pos = next.offset + next.length - 1; + while (pos >= next.offset && next.bytes[pos] == -1) { + next.bytes[pos] = 0; + pos--; + } + if (pos >= next.offset) { + next.bytes[pos]++; + } else { + // Every byte in our prefix had value 0xFF. We must match all subsequent ordinals. + return null; + } + return next; + } + + private interface LongBiConsumer { + void accept(long a, long b); + } + + private static void process(SortedSetDocValues globalOrdinals, long length, SortedSet prefixes, LongBiConsumer consumer) + throws IOException { + for (BytesRef prefix : prefixes) { + long startOrd = globalOrdinals.lookupTerm(prefix); + if (startOrd < 0) { + // The prefix is not an exact match in the ordinals (can skip equal length below) + startOrd = -1 - startOrd; + // Make sure that the term at startOrd starts with prefix + BytesRef startTerm = globalOrdinals.lookupOrd(startOrd); + if (startTerm.length <= prefix.length + || !Arrays.equals( + startTerm.bytes, + startTerm.offset, + startTerm.offset + prefix.length, + prefix.bytes, + prefix.offset, + prefix.offset + prefix.length + )) { + continue; + } + } + if (startOrd >= length) { + continue; + } + BytesRef next = nextBytesRef(prefix); + if (next == null) { + consumer.accept(startOrd, length); + } else { + long endOrd = globalOrdinals.lookupTerm(next); + if (endOrd < 0) { + endOrd = -1 - endOrd; + } + if (startOrd < endOrd) { + consumer.accept(startOrd, endOrd); + } + } + } + + } + + @Override + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { + LongBitSet accept = new LongBitSet(globalOrdinals.getValueCount()); + if (!includePrefixes.isEmpty()) { + process(globalOrdinals, accept.length(), includePrefixes, accept::set); + } else if (accept.length() > 0) { + // Exclude-only + accept.set(0, accept.length()); + } + process(globalOrdinals, accept.length(), excludePrefixes, accept::clear); + return accept; + } + } + private final String include, exclude; private final SortedSet includeValues, excludeValues; private final int incZeroBasedPartition; @@ -709,8 +803,13 @@ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { } public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, int maxRegexLength) { - if (isRegexBased()) { + if ((include == null || include.endsWith(".*")) && (exclude == null || exclude.endsWith(".*"))) { + PrefixBackedOrdinalsFilter prefixBackedOrdinalsFilter = tryCreatePrefixOrdinalsFilter(maxRegexLength); + if (prefixBackedOrdinalsFilter != null) { + return prefixBackedOrdinalsFilter; + } + } return new AutomatonBackedOrdinalsFilter(toAutomaton(maxRegexLength)); } if (isPartitionBased()) { @@ -720,6 +819,94 @@ public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, int maxRege return new TermListBackedOrdinalsFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format)); } + private static List expandRegexp(RegExp regExp, int maxPrefixes) { + switch (regExp.kind) { + case REGEXP_UNION: + List alternatives = new ArrayList<>(); + while (regExp.exp1.kind == RegExp.Kind.REGEXP_UNION) { + alternatives.add(regExp.exp2); + regExp = regExp.exp1; + } + alternatives.add(regExp.exp2); + alternatives.add(regExp.exp1); + List output = new ArrayList<>(); + for (RegExp leaf : alternatives) { + List leafExpansions = expandRegexp(leaf, maxPrefixes); + if (leafExpansions == null) { + return null; + } else 
{ + if (output.size() + leafExpansions.size() > maxPrefixes) { + return null; + } + output.addAll(leafExpansions); + } + } + return output; + case REGEXP_CONCATENATION: + List prefixes = expandRegexp(regExp.exp1, maxPrefixes); + if (prefixes == null) { + return null; + } + List suffixes = expandRegexp(regExp.exp2, maxPrefixes); + if (suffixes == null) { + return null; + } + if (prefixes.size() * suffixes.size() > maxPrefixes) { + return null; + } + List out = new ArrayList<>(); + StringBuilder stringBuilder = new StringBuilder(); + for (String prefix : prefixes) { + for (String suffix : suffixes) { + stringBuilder.setLength(0); + stringBuilder.append(prefix).append(suffix); + out.add(stringBuilder.toString()); + } + } + return out; + case REGEXP_CHAR: + return List.of(Character.toString(regExp.c)); + case REGEXP_STRING: + return List.of(regExp.s); + default: + return null; + } + } + + static SortedSet extractPrefixes(String pattern, int maxRegexLength) { + if (pattern == null) { + return Collections.emptySortedSet(); + } + SortedSet prefixSet = null; + validateRegExpStringLength(pattern, maxRegexLength); + RegExp regExp = new RegExp(pattern); + if (regExp.kind == RegExp.Kind.REGEXP_CONCATENATION && regExp.exp2.kind == RegExp.Kind.REGEXP_REPEAT) { + RegExp tail = regExp.exp2.exp1; + if (tail.kind == RegExp.Kind.REGEXP_ANYCHAR || tail.kind == RegExp.Kind.REGEXP_ANYSTRING) { + List prefixes = expandRegexp(regExp.exp1, MAX_PREFIXES); + if (prefixes != null) { + prefixSet = new TreeSet<>(); + for (String prefix : prefixes) { + prefixSet.add(new BytesRef(prefix)); + } + } + } + } + return prefixSet; + } + + private PrefixBackedOrdinalsFilter tryCreatePrefixOrdinalsFilter(int maxRegexLength) { + SortedSet includeSet = extractPrefixes(include, maxRegexLength); + if (includeSet == null) { + return null; + } + SortedSet excludeSet = extractPrefixes(exclude, maxRegexLength); + if (excludeSet == null) { + return null; + } + return new PrefixBackedOrdinalsFilter(includeSet, excludeSet); + } + public LongFilter convertToLongFilter(DocValueFormat format) { if (isPartitionBased()) { diff --git a/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java b/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java new file mode 100644 index 0000000000000..b104925df77b3 --- /dev/null +++ b/server/src/main/java/org/opensearch/wlm/WorkloadManagementSettings.java @@ -0,0 +1,256 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.wlm; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; + +/** + * Main class to declare Workload Management related settings + */ +public class WorkloadManagementSettings { + private static final Double DEFAULT_NODE_LEVEL_MEMORY_REJECTION_THRESHOLD = 0.8; + private static final Double DEFAULT_NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD = 0.9; + private static final Double DEFAULT_NODE_LEVEL_CPU_REJECTION_THRESHOLD = 0.8; + private static final Double DEFAULT_NODE_LEVEL_CPU_CANCELLATION_THRESHOLD = 0.9; + public static final double NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD_MAX_VALUE = 0.95; + public static final double NODE_LEVEL_MEMORY_REJECTION_THRESHOLD_MAX_VALUE = 0.9; + public static final double NODE_LEVEL_CPU_CANCELLATION_THRESHOLD_MAX_VALUE = 0.95; + public static final double NODE_LEVEL_CPU_REJECTION_THRESHOLD_MAX_VALUE = 0.9; + + private Double nodeLevelMemoryCancellationThreshold; + private Double nodeLevelMemoryRejectionThreshold; + private Double nodeLevelCpuCancellationThreshold; + private Double nodeLevelCpuRejectionThreshold; + + /** + * Setting name for node level memory based rejection threshold for QueryGroup service + */ + public static final String NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.memory_rejection_threshold"; + /** + * Setting to control the memory based rejection threshold + */ + public static final Setting NODE_LEVEL_MEMORY_REJECTION_THRESHOLD = Setting.doubleSetting( + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level cpu based rejection threshold for QueryGroup service + */ + public static final String NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.cpu_rejection_threshold"; + /** + * Setting to control the cpu based rejection threshold + */ + public static final Setting NODE_LEVEL_CPU_REJECTION_THRESHOLD = Setting.doubleSetting( + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_CPU_REJECTION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level memory based cancellation threshold for QueryGroup service + */ + public static final String NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.memory_cancellation_threshold"; + /** + * Setting to control the memory based cancellation threshold + */ + public static final Setting NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD = Setting.doubleSetting( + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** + * Setting name for node level cpu based cancellation threshold for QueryGroup service + */ + public static final String NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME = "wlm.query_group.node.cpu_cancellation_threshold"; + /** + * Setting to control the cpu based cancellation threshold + */ + public static final Setting NODE_LEVEL_CPU_CANCELLATION_THRESHOLD = Setting.doubleSetting( + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, + DEFAULT_NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + + /** + * QueryGroup service settings constructor + * @param settings - QueryGroup service settings + * @param clusterSettings - QueryGroup cluster settings + */ + 
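Before the constructor, a usage sketch may help: the four thresholds are fractions of node capacity, and each rejection threshold must stay at or below its cancellation counterpart. Values here are hypothetical, and the sketch assumes a ClusterSettings instance with these settings registered:

Settings nodeSettings = Settings.builder()
    .put("wlm.query_group.node.memory_rejection_threshold", 0.75)    // start rejecting new tasks
    .put("wlm.query_group.node.memory_cancellation_threshold", 0.85) // start cancelling running tasks
    .put("wlm.query_group.node.cpu_rejection_threshold", 0.70)
    .put("wlm.query_group.node.cpu_cancellation_threshold", 0.80)
    .build();
// The constructor throws IllegalArgumentException if any cancellation threshold is
// below its rejection counterpart; the 0.90/0.95 upper caps are enforced by the
// setters when values are updated dynamically.
WorkloadManagementSettings wlm = new WorkloadManagementSettings(nodeSettings, clusterSettings);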
public WorkloadManagementSettings(Settings settings, ClusterSettings clusterSettings) { + nodeLevelMemoryCancellationThreshold = NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD.get(settings); + nodeLevelMemoryRejectionThreshold = NODE_LEVEL_MEMORY_REJECTION_THRESHOLD.get(settings); + nodeLevelCpuCancellationThreshold = NODE_LEVEL_CPU_CANCELLATION_THRESHOLD.get(settings); + nodeLevelCpuRejectionThreshold = NODE_LEVEL_CPU_REJECTION_THRESHOLD.get(settings); + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD, this::setNodeLevelMemoryCancellationThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_MEMORY_REJECTION_THRESHOLD, this::setNodeLevelMemoryRejectionThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_CPU_CANCELLATION_THRESHOLD, this::setNodeLevelCpuCancellationThreshold); + clusterSettings.addSettingsUpdateConsumer(NODE_LEVEL_CPU_REJECTION_THRESHOLD, this::setNodeLevelCpuRejectionThreshold); + } + + /** + * Method to get the node level memory based cancellation threshold + * @return current node level memory based cancellation threshold + */ + public Double getNodeLevelMemoryCancellationThreshold() { + return nodeLevelMemoryCancellationThreshold; + } + + /** + * Method to set the node level memory based cancellation threshold + * @param nodeLevelMemoryCancellationThreshold sets the new node level memory based cancellation threshold + * @throws IllegalArgumentException if the value is > 0.95 and cancellation < rejection threshold + */ + public void setNodeLevelMemoryCancellationThreshold(Double nodeLevelMemoryCancellationThreshold) { + if (Double.compare(nodeLevelMemoryCancellationThreshold, NODE_LEVEL_MEMORY_CANCELLATION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.95 as it can result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelMemoryCancellationThreshold = nodeLevelMemoryCancellationThreshold; + } + + /** + * Method to get the node level cpu based cancellation threshold + * @return current node level cpu based cancellation threshold + */ + public Double getNodeLevelCpuCancellationThreshold() { + return nodeLevelCpuCancellationThreshold; + } + + /** + * Method to set the node level cpu based cancellation threshold + * @param nodeLevelCpuCancellationThreshold sets the new node level cpu based cancellation threshold + * @throws IllegalArgumentException if the value is > 0.95 and cancellation < rejection threshold + */ + public void setNodeLevelCpuCancellationThreshold(Double nodeLevelCpuCancellationThreshold) { + if (Double.compare(nodeLevelCpuCancellationThreshold, NODE_LEVEL_CPU_CANCELLATION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.95 as it can result 
in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelCpuCancellationThreshold = nodeLevelCpuCancellationThreshold; + } + + /** + * Method to get the memory based node level rejection threshold + * @return the current memory based node level rejection threshold + */ + public Double getNodeLevelMemoryRejectionThreshold() { + return nodeLevelMemoryRejectionThreshold; + } + + /** + * Method to set the node level memory based rejection threshold + * @param nodeLevelMemoryRejectionThreshold sets the new memory based rejection threshold + * @throws IllegalArgumentException if rejection > 0.90 and rejection < cancellation threshold + */ + public void setNodeLevelMemoryRejectionThreshold(Double nodeLevelMemoryRejectionThreshold) { + if (Double.compare(nodeLevelMemoryRejectionThreshold, NODE_LEVEL_MEMORY_REJECTION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.90 as it can result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelMemoryRejectionThreshold, + nodeLevelMemoryCancellationThreshold, + NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, + NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelMemoryRejectionThreshold = nodeLevelMemoryRejectionThreshold; + } + + /** + * Method to get the cpu based node level rejection threshold + * @return the current cpu based node level rejection threshold + */ + public Double getNodeLevelCpuRejectionThreshold() { + return nodeLevelCpuRejectionThreshold; + } + + /** + * Method to set the node level cpu based rejection threshold + * @param nodeLevelCpuRejectionThreshold sets the new cpu based rejection threshold + * @throws IllegalArgumentException if rejection > 0.90 and rejection < cancellation threshold + */ + public void setNodeLevelCpuRejectionThreshold(Double nodeLevelCpuRejectionThreshold) { + if (Double.compare(nodeLevelCpuRejectionThreshold, NODE_LEVEL_CPU_REJECTION_THRESHOLD_MAX_VALUE) > 0) { + throw new IllegalArgumentException( + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME + " value cannot be greater than 0.90 as it can result in a node drop" + ); + } + + ensureRejectionThresholdIsLessThanCancellation( + nodeLevelCpuRejectionThreshold, + nodeLevelCpuCancellationThreshold, + NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, + NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME + ); + + this.nodeLevelCpuRejectionThreshold = nodeLevelCpuRejectionThreshold; + } + + /** + * Method to validate that the cancellation threshold is greater than or equal to rejection threshold + * @param nodeLevelRejectionThreshold rejection threshold to be compared + * @param nodeLevelCancellationThreshold cancellation threshold to be compared + * @param rejectionThresholdSettingName name of the rejection threshold setting + * @param cancellationThresholdSettingName name of the cancellation threshold setting + * @throws IllegalArgumentException if cancellation threshold is less than rejection threshold + */ + private void ensureRejectionThresholdIsLessThanCancellation( + Double nodeLevelRejectionThreshold, + Double nodeLevelCancellationThreshold, + String rejectionThresholdSettingName, + String cancellationThresholdSettingName + ) { + if (Double.compare(nodeLevelCancellationThreshold, nodeLevelRejectionThreshold) < 0) { + throw new 
IllegalArgumentException( + cancellationThresholdSettingName + " value should not be less than " + rejectionThresholdSettingName + ); + } + } +} diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index e030a813373c1..f51452c57f975 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1 @@ -org.opensearch.index.codec.composite.Composite99Codec +org.opensearch.index.codec.composite.composite99.Composite99Codec diff --git a/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java b/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java index 37e884502b613..3c1c84653b384 100644 --- a/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java +++ b/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java @@ -85,6 +85,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.opensearch.index.remote.RemoteMigrationIndexMetadataUpdaterTests.createIndexMetadataWithRemoteStoreSettings; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING; @@ -901,6 +902,7 @@ public void testDontAllowSwitchingCompatibilityModeForClusterWithMultipleVersion private Map getRemoteStoreNodeAttributes() { Map remoteStoreNodeAttributes = new HashMap<>(); + remoteStoreNodeAttributes.put(REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-cluster-repo-1"); remoteStoreNodeAttributes.put(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-segment-repo-1"); remoteStoreNodeAttributes.put(REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-translog-repo-1"); return remoteStoreNodeAttributes; diff --git a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java index 9f463673aa6a6..62c71f59316b0 100644 --- a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java @@ -628,16 +628,7 @@ public void testPreventJoinClusterWithRemoteStateNodeJoiningRemoteStoreCluster() IllegalStateException.class, () -> JoinTaskExecutor.ensureNodesCompatibility(joiningNode, currentState.getNodes(), currentState.metadata()) ); - assertTrue( - e.getMessage() - .equals( - "a remote store node [" - + joiningNode - + "] is trying to join a remote store cluster with incompatible node attributes in comparison with existing node [" - + currentState.getNodes().getNodes().values().stream().findFirst().get() - + "]" - ) - ); + assertTrue(e.getMessage().equals("a non remote store node [" + joiningNode + "] is trying to join a remote store cluster")); } public void testPreventJoinClusterWithRemoteStoreNodeJoiningRemoteStateCluster() { @@ 
-657,16 +648,7 @@ public void testPreventJoinClusterWithRemoteStoreNodeJoiningRemoteStateCluster() IllegalStateException.class, () -> JoinTaskExecutor.ensureNodesCompatibility(joiningNode, currentState.getNodes(), currentState.metadata()) ); - assertTrue( - e.getMessage() - .equals( - "a remote store node [" - + joiningNode - + "] is trying to join a remote store cluster with incompatible node attributes in comparison with existing node [" - + currentState.getNodes().getNodes().values().stream().findFirst().get() - + "]" - ) - ); + assertTrue(e.getMessage().equals("a remote store node [" + joiningNode + "] is trying to join a non remote store cluster")); } public void testUpdatesClusterStateWithSingleNodeCluster() throws Exception { diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 86ca8b3ad6319..ee10588769b7b 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -158,6 +158,7 @@ import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.indices.ShardLimitValidatorTests.createTestShardLimitService; import static org.opensearch.node.Node.NODE_ATTRIBUTES; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.getRemoteStoreTranslogRepo; @@ -1644,7 +1645,12 @@ public void testNewIndexIsRemoteStoreBackedForRemoteStoreDirectionAndMixedMode() null ); - Map missingTranslogAttribute = Map.of(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-segment-repo-1"); + Map missingTranslogAttribute = Map.of( + REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, + "cluster-state-repo-1", + REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, + "my-segment-repo-1" + ); DiscoveryNodes finalDiscoveryNodes = DiscoveryNodes.builder() .add(nonRemoteClusterManagerNode) @@ -2292,6 +2298,7 @@ private void verifyRemoteStoreIndexSettings( private DiscoveryNode getRemoteNode() { Map attributes = new HashMap<>(); + attributes.put(REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-cluster-rep-1"); attributes.put(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-segment-repo-1"); attributes.put(REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-translog-repo-1"); return new DiscoveryNode( diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 5e3b74ee138ab..f8e1c609e6ee8 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -68,6 +68,7 @@ import static org.opensearch.cluster.routing.ShardRoutingState.STARTED; import static org.opensearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static 
org.opensearch.common.util.FeatureFlags.REMOTE_STORE_MIGRATION_EXPERIMENTAL; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING; @@ -853,6 +854,8 @@ public void testPreferReplicaOnRemoteNodeForPrimaryPromotion() { // add a remote node and start primary shard Map remoteStoreNodeAttributes = Map.of( + REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, + "REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY", REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_VALUE", REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteStoreMigrationAllocationDeciderTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteStoreMigrationAllocationDeciderTests.java index 5b29922f2400c..e6e81c94e7f32 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteStoreMigrationAllocationDeciderTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteStoreMigrationAllocationDeciderTests.java @@ -68,6 +68,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeService.Direction.NONE; import static org.opensearch.node.remotestore.RemoteStoreNodeService.Direction.REMOTE_STORE; import static org.opensearch.node.remotestore.RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING; @@ -617,6 +618,7 @@ private DiscoveryNode getRemoteNode() { REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, "REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_VALUE" ); + attributes.put(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_VALUE"); return new DiscoveryNode( UUIDs.base64UUID(), buildNewFakeTransportAddress(), diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 7146b7dc51753..bbf98b5c32918 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -48,7 +48,7 @@ import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.IndexAnalyzers; -import org.opensearch.index.codec.composite.Composite99Codec; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.similarity.SimilarityService; diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java 
b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java new file mode 100644 index 0000000000000..7fb8fe7f68f45 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactoryTests.java @@ -0,0 +1,85 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.InfoStream; +import org.apache.lucene.util.Version; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.UUID; + +public class LuceneDocValuesConsumerFactoryTests extends OpenSearchTestCase { + + private Directory directory; + private final String dataCodec = "data_codec"; + private final String dataExtension = "data_extension"; + private final String metaCodec = "meta_codec"; + private final String metaExtension = "meta_extension"; + + @Before + public void setup() { + directory = newDirectory(); + } + + public void testGetDocValuesConsumerForCompositeCodec() throws IOException { + SegmentInfo segmentInfo = new SegmentInfo( + directory, + Version.LATEST, + Version.LUCENE_9_11_0, + "test_segment", + randomInt(), + false, + false, + new Lucene99Codec(), + new HashMap<>(), + UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), + new HashMap<>(), + null + ); + SegmentWriteState state = new SegmentWriteState( + InfoStream.getDefault(), + segmentInfo.dir, + segmentInfo, + new FieldInfos(new FieldInfo[0]), + null, + newIOContext(random()) + ); + + DocValuesConsumer consumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( + state, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ); + + assertEquals("org.apache.lucene.codecs.lucene90.Lucene90DocValuesConsumer", consumer.getClass().getName()); + assertEquals(CompositeCodecFactory.COMPOSITE_CODEC, Composite99Codec.COMPOSITE_INDEX_CODEC_NAME); + consumer.close(); + } + + @After + public void teardown() throws Exception { + super.tearDown(); + directory.close(); + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java new file mode 100644 index 0000000000000..55d637dfb9cae --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite/LuceneDocValuesProducerFactoryTests.java @@ -0,0 +1,124 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.InfoStream; +import org.apache.lucene.util.Version; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.UUID; + +import static org.mockito.Mockito.mock; + +public class LuceneDocValuesProducerFactoryTests extends OpenSearchTestCase { + + private Directory directory; + private final String dataCodec = "data_codec"; + private final String dataExtension = "data_extension"; + private final String metaCodec = "meta_codec"; + private final String metaExtension = "meta_extension"; + + @Before + public void setup() { + directory = newDirectory(); + } + + public void testGetDocValuesProducerForCompositeCodec99() throws IOException { + SegmentInfo segmentInfo = new SegmentInfo( + directory, + Version.LATEST, + Version.LUCENE_9_11_0, + "test_segment", + randomInt(), + false, + false, + new Lucene99Codec(), + new HashMap<>(), + UUID.randomUUID().toString().substring(0, 16).getBytes(StandardCharsets.UTF_8), + new HashMap<>(), + null + ); + + // open an consumer first in order for the producer to find the file + SegmentWriteState state = new SegmentWriteState( + InfoStream.getDefault(), + segmentInfo.dir, + segmentInfo, + new FieldInfos(new FieldInfo[0]), + null, + newIOContext(random()) + ); + DocValuesConsumer consumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( + state, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ); + consumer.close(); + + SegmentReadState segmentReadState = new SegmentReadState( + segmentInfo.dir, + segmentInfo, + new FieldInfos(new FieldInfo[0]), + newIOContext(random()) + ); + DocValuesProducer producer = LuceneDocValuesProducerFactory.getDocValuesProducerForCompositeCodec( + Composite99Codec.COMPOSITE_INDEX_CODEC_NAME, + segmentReadState, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ); + + assertNotNull(producer); + assertEquals("org.apache.lucene.codecs.lucene90.Lucene90DocValuesProducer", producer.getClass().getName()); + producer.close(); + } + + public void testGetDocValuesProducerForCompositeCodec_InvalidCodec() { + SegmentReadState mockSegmentReadState = mock(SegmentReadState.class); + + IllegalStateException exception = expectThrows(IllegalStateException.class, () -> { + LuceneDocValuesProducerFactory.getDocValuesProducerForCompositeCodec( + "invalid_codec", + mockSegmentReadState, + dataCodec, + dataExtension, + metaCodec, + metaExtension + ); + }); + + assertNotNull(exception); + assertTrue(exception.getMessage().contains("Invalid composite codec")); + } + + @After + public void teardown() throws Exception { + super.tearDown(); + directory.close(); + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite/SortedNumericDocValuesWriterWrapperTests.java 
b/server/src/test/java/org/opensearch/index/codec/composite/SortedNumericDocValuesWriterWrapperTests.java new file mode 100644 index 0000000000000..54eead20ef354 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/composite/SortedNumericDocValuesWriterWrapperTests.java @@ -0,0 +1,94 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedNumericDocValuesWriterWrapper; +import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.util.Counter; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.Collections; + +public class SortedNumericDocValuesWriterWrapperTests extends OpenSearchTestCase { + + private SortedNumericDocValuesWriterWrapper wrapper; + private FieldInfo fieldInfo; + private Counter counter; + + @Override + public void setUp() throws Exception { + super.setUp(); + fieldInfo = new FieldInfo( + "field", + 1, + false, + false, + true, + IndexOptions.NONE, + DocValuesType.NONE, + -1, + Collections.emptyMap(), + 0, + 0, + 0, + 0, + VectorEncoding.FLOAT32, + VectorSimilarityFunction.EUCLIDEAN, + false, + false + ); + counter = Counter.newCounter(); + wrapper = new SortedNumericDocValuesWriterWrapper(fieldInfo, counter); + } + + public void testAddValue() throws IOException { + wrapper.addValue(0, 10); + wrapper.addValue(1, 20); + wrapper.addValue(2, 30); + + SortedNumericDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + + assertEquals(0, docValues.nextDoc()); + assertEquals(10, docValues.nextValue()); + assertEquals(1, docValues.nextDoc()); + assertEquals(20, docValues.nextValue()); + assertEquals(2, docValues.nextDoc()); + assertEquals(30, docValues.nextValue()); + } + + public void testGetDocValues() { + SortedNumericDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + } + + public void testMultipleValues() throws IOException { + wrapper.addValue(0, 10); + wrapper.addValue(0, 20); + wrapper.addValue(1, 30); + + SortedNumericDocValues docValues = wrapper.getDocValues(); + assertNotNull(docValues); + + assertEquals(0, docValues.nextDoc()); + assertEquals(10, docValues.nextValue()); + assertEquals(20, docValues.nextValue()); + assertThrows(IllegalStateException.class, docValues::nextValue); + + assertEquals(1, docValues.nextDoc()); + assertEquals(30, docValues.nextValue()); + assertThrows(IllegalStateException.class, docValues::nextValue); + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java b/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java index 6fa88215cad48..54a9cc035d7a9 100644 --- a/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java +++ b/server/src/test/java/org/opensearch/index/codec/composite/datacube/startree/StarTreeDocValuesFormatTests.java @@ -29,7 +29,7 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; 
import org.opensearch.index.MapperTestUtils; -import org.opensearch.index.codec.composite.Composite99Codec; +import org.opensearch.index.codec.composite.composite99.Composite99Codec; import org.opensearch.index.mapper.MapperService; import org.opensearch.indices.IndicesModule; import org.junit.After;
diff --git a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java index 3fa97825cdfc6..6b3b87da89915 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StarTreeMapperTests.java @@ -52,7 +52,7 @@ public void teardown() { } public void testValidStarTree() throws IOException { - MapperService mapperService = createMapperService(getExpandedMapping("status", "size")); + MapperService mapperService = createMapperService(getExpandedMappingWithJustAvg("status", "size")); Set compositeFieldTypes = mapperService.getCompositeFieldTypes(); for (CompositeMappedFieldType type : compositeFieldTypes) { StarTreeMapper.StarTreeFieldType starTreeFieldType = (StarTreeMapper.StarTreeFieldType) type; @@ -66,7 +66,65 @@ public void testValidStarTree() throws IOException { assertEquals(expectedTimeUnits, dateDim.getIntervals()); assertEquals("status", starTreeFieldType.getDimensions().get(1).getField()); assertEquals("size", starTreeFieldType.getMetrics().get(0).getField()); - List expectedMetrics = Arrays.asList(MetricStat.SUM, MetricStat.AVG); + + // Assert VALUE_COUNT and SUM get added when AVG is defined + List expectedMetrics = Arrays.asList(MetricStat.AVG, MetricStat.VALUE_COUNT, MetricStat.SUM); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); assertEquals(100, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); assertEquals(StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, starTreeFieldType.getStarTreeConfig().getBuildMode()); assertEquals( + new HashSet<>(Arrays.asList("@timestamp", "status")), + starTreeFieldType.getStarTreeConfig().getSkipStarNodeCreationInDims() + ); + } + } + + public void testMetricsWithJustSum() throws IOException { + MapperService mapperService = createMapperService(getExpandedMappingWithJustSum("status", "size")); + Set compositeFieldTypes = mapperService.getCompositeFieldTypes(); + for (CompositeMappedFieldType type : compositeFieldTypes) { + StarTreeMapper.StarTreeFieldType starTreeFieldType = (StarTreeMapper.StarTreeFieldType) type; + assertEquals("@timestamp", starTreeFieldType.getDimensions().get(0).getField()); + assertTrue(starTreeFieldType.getDimensions().get(0) instanceof DateDimension); + DateDimension dateDim = (DateDimension) starTreeFieldType.getDimensions().get(0); + List expectedTimeUnits = Arrays.asList( + Rounding.DateTimeUnit.DAY_OF_MONTH, + Rounding.DateTimeUnit.MONTH_OF_YEAR + ); + assertEquals(expectedTimeUnits, dateDim.getIntervals()); + assertEquals("status", starTreeFieldType.getDimensions().get(1).getField()); + assertEquals("size", starTreeFieldType.getMetrics().get(0).getField()); + + // Assert AVG is not added when only one of its base metrics is present + List expectedMetrics = List.of(MetricStat.SUM); + assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); + assertEquals(100, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); + assertEquals(StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, starTreeFieldType.getStarTreeConfig().getBuildMode()); + assertEquals( + new
HashSet<>(Arrays.asList("@timestamp", "status")), + starTreeFieldType.getStarTreeConfig().getSkipStarNodeCreationInDims() + ); + } + } + + public void testMetricsWithCountAndSum() throws IOException { + MapperService mapperService = createMapperService(getExpandedMappingWithSumAndCount("status", "size")); + Set compositeFieldTypes = mapperService.getCompositeFieldTypes(); + for (CompositeMappedFieldType type : compositeFieldTypes) { + StarTreeMapper.StarTreeFieldType starTreeFieldType = (StarTreeMapper.StarTreeFieldType) type; + assertEquals("@timestamp", starTreeFieldType.getDimensions().get(0).getField()); + assertTrue(starTreeFieldType.getDimensions().get(0) instanceof DateDimension); + DateDimension dateDim = (DateDimension) starTreeFieldType.getDimensions().get(0); + List expectedTimeUnits = Arrays.asList( + Rounding.DateTimeUnit.DAY_OF_MONTH, + Rounding.DateTimeUnit.MONTH_OF_YEAR + ); + assertEquals(expectedTimeUnits, dateDim.getIntervals()); + assertEquals("status", starTreeFieldType.getDimensions().get(1).getField()); + assertEquals("size", starTreeFieldType.getMetrics().get(0).getField()); + + // Assert AVG gets added when both of its base metrics are already present + List expectedMetrics = List.of(MetricStat.SUM, MetricStat.VALUE_COUNT, MetricStat.AVG); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); assertEquals(100, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); assertEquals(StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, starTreeFieldType.getStarTreeConfig().getBuildMode()); @@ -92,13 +150,7 @@ public void testValidStarTreeDefaults() throws IOException { assertEquals(expectedTimeUnits, dateDim.getIntervals()); assertEquals("status", starTreeFieldType.getDimensions().get(1).getField()); assertEquals("status", starTreeFieldType.getMetrics().get(0).getField()); - List expectedMetrics = Arrays.asList( - MetricStat.AVG, - MetricStat.VALUE_COUNT, - MetricStat.SUM, - MetricStat.MAX, - MetricStat.MIN - ); + List expectedMetrics = Arrays.asList(MetricStat.VALUE_COUNT, MetricStat.SUM, MetricStat.AVG); assertEquals(expectedMetrics, starTreeFieldType.getMetrics().get(0).getMetrics()); assertEquals(10000, starTreeFieldType.getStarTreeConfig().maxLeafDocs()); assertEquals(StarTreeFieldConfiguration.StarTreeBuildMode.OFF_HEAP, starTreeFieldType.getStarTreeConfig().getBuildMode()); @@ -109,7 +161,7 @@ public void testInvalidDim() { MapperParsingException ex = expectThrows( MapperParsingException.class, - () -> createMapperService(getExpandedMapping("invalid", "size")) + () -> createMapperService(getExpandedMappingWithJustAvg("invalid", "size")) ); assertEquals("Failed to parse mapping [_doc]: unknown dimension field [invalid]", ex.getMessage()); } @@ -117,7 +169,7 @@ public void testInvalidMetric() { MapperParsingException ex = expectThrows( MapperParsingException.class, - () -> createMapperService(getExpandedMapping("status", "invalid")) + () -> createMapperService(getExpandedMappingWithJustAvg("status", "invalid")) ); assertEquals("Failed to parse mapping [_doc]: unknown metric field [invalid]", ex.getMessage()); } @@ -232,6 +284,9 @@ public void testMetric() { assertEquals(MetricStat.MIN, MetricStat.fromTypeName("min")); assertEquals(MetricStat.SUM, MetricStat.fromTypeName("sum")); assertEquals(MetricStat.AVG, MetricStat.fromTypeName("avg")); + + assertEquals(List.of(MetricStat.VALUE_COUNT, MetricStat.SUM), MetricStat.AVG.getBaseMetrics()); +
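// Taken together, the mapper tests above pin down the mapping-time expansion rules;
// condensed here for reference (ordering reflects LinkedHashSet insertion order):
//   stats ["avg"]                -> [AVG, VALUE_COUNT, SUM]   (bases pulled in by addBaseMetrics)
//   stats ["sum"]                -> [SUM]                     (AVG not added: VALUE_COUNT missing)
//   stats ["sum", "value_count"] -> [SUM, VALUE_COUNT, AVG]   (AVG auto-added by addEligibleDerivedMetrics)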
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> MetricStat.fromTypeName("invalid")); assertEquals("Invalid metric stat: invalid", ex.getMessage()); } @@ -310,7 +365,7 @@ public void testStarTreeField() { } public void testValidations() throws IOException { - MapperService mapperService = createMapperService(getExpandedMapping("status", "size")); + MapperService mapperService = createMapperService(getExpandedMappingWithJustAvg("status", "size")); Settings settings = Settings.builder().put(CompositeIndexSettings.STAR_TREE_INDEX_ENABLED_SETTING.getKey(), true).build(); CompositeIndexSettings enabledCompositeIndexSettings = new CompositeIndexSettings( settings, @@ -370,7 +425,7 @@ public void testValidations() throws IOException { ); } - private XContentBuilder getExpandedMapping(String dim, String metric) throws IOException { + private XContentBuilder getExpandedMappingWithJustAvg(String dim, String metric) throws IOException { return topMapping(b -> { b.startObject("composite"); b.startObject("startree"); @@ -399,7 +454,6 @@ private XContentBuilder getExpandedMapping(String dim, String metric) throws IOE b.startObject(); b.field("name", metric); b.startArray("stats"); - b.value("sum"); b.value("avg"); b.endArray(); b.endObject(); @@ -421,6 +475,107 @@ private XContentBuilder getExpandedMapping(String dim, String metric) throws IOE }); } + private XContentBuilder getExpandedMappingWithJustSum(String dim, String metric) throws IOException { + return topMapping(b -> { + b.startObject("composite"); + b.startObject("startree"); + b.field("type", "star_tree"); + b.startObject("config"); + b.field("max_leaf_docs", 100); + b.startArray("skip_star_node_creation_for_dimensions"); + { + b.value("@timestamp"); + b.value("status"); + } + b.endArray(); + b.startArray("ordered_dimensions"); + b.startObject(); + b.field("name", "@timestamp"); + b.startArray("calendar_intervals"); + b.value("day"); + b.value("month"); + b.endArray(); + b.endObject(); + b.startObject(); + b.field("name", dim); + b.endObject(); + b.endArray(); + b.startArray("metrics"); + b.startObject(); + b.field("name", metric); + b.startArray("stats"); + b.value("sum"); + b.endArray(); + b.endObject(); + b.endArray(); + b.endObject(); + b.endObject(); + b.endObject(); + b.startObject("properties"); + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + b.startObject("status"); + b.field("type", "integer"); + b.endObject(); + b.startObject("size"); + b.field("type", "integer"); + b.endObject(); + b.endObject(); + }); + } + + private XContentBuilder getExpandedMappingWithSumAndCount(String dim, String metric) throws IOException { + return topMapping(b -> { + b.startObject("composite"); + b.startObject("startree"); + b.field("type", "star_tree"); + b.startObject("config"); + b.field("max_leaf_docs", 100); + b.startArray("skip_star_node_creation_for_dimensions"); + { + b.value("@timestamp"); + b.value("status"); + } + b.endArray(); + b.startArray("ordered_dimensions"); + b.startObject(); + b.field("name", "@timestamp"); + b.startArray("calendar_intervals"); + b.value("day"); + b.value("month"); + b.endArray(); + b.endObject(); + b.startObject(); + b.field("name", dim); + b.endObject(); + b.endArray(); + b.startArray("metrics"); + b.startObject(); + b.field("name", metric); + b.startArray("stats"); + b.value("sum"); + b.value("value_count"); + b.endArray(); + b.endObject(); + b.endArray(); + b.endObject(); + b.endObject(); + b.endObject(); + b.startObject("properties"); + 
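// For readers skimming these builders: with dim = "status" and metric = "size",
// getExpandedMappingWithSumAndCount assembles roughly the following JSON:
// {
//   "composite": { "startree": { "type": "star_tree", "config": {
//     "max_leaf_docs": 100,
//     "skip_star_node_creation_for_dimensions": ["@timestamp", "status"],
//     "ordered_dimensions": [ { "name": "@timestamp", "calendar_intervals": ["day", "month"] },
//                             { "name": "status" } ],
//     "metrics": [ { "name": "size", "stats": ["sum", "value_count"] } ]
//   } } },
//   "properties": { "@timestamp": { "type": "date" },
//                   "status": { "type": "integer" },
//                   "size": { "type": "integer" } }
// }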
b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + b.startObject("status"); + b.field("type", "integer"); + b.endObject(); + b.startObject("size"); + b.field("type", "integer"); + b.endObject(); + b.endObject(); + }); + } + private XContentBuilder getMinMapping() throws IOException { return getMinMapping(false, false, false, false); } diff --git a/server/src/test/java/org/opensearch/index/remote/RemoteStoreUtilsTests.java b/server/src/test/java/org/opensearch/index/remote/RemoteStoreUtilsTests.java index ec48032df4a15..130fb9a53cbf7 100644 --- a/server/src/test/java/org/opensearch/index/remote/RemoteStoreUtilsTests.java +++ b/server/src/test/java/org/opensearch/index/remote/RemoteStoreUtilsTests.java @@ -60,6 +60,7 @@ import static org.opensearch.index.store.RemoteSegmentStoreDirectory.MetadataFilenameUtils.METADATA_PREFIX; import static org.opensearch.index.store.RemoteSegmentStoreDirectory.MetadataFilenameUtils.SEPARATOR; import static org.opensearch.index.translog.transfer.TranslogTransferMetadata.METADATA_SEPARATOR; +import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING; @@ -533,6 +534,7 @@ private RoutingTable createRoutingTableAllShardsStarted( private Map getRemoteStoreNodeAttributes() { Map remoteStoreNodeAttributes = new HashMap<>(); + remoteStoreNodeAttributes.put(REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-cluster-repo-1"); remoteStoreNodeAttributes.put(REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-segment-repo-1"); remoteStoreNodeAttributes.put(REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, "my-translog-repo-1"); return remoteStoreNodeAttributes; diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 1caa2c99fc3b8..e8a0f70ee3563 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -54,6 +54,7 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.WriteRequest; +import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; @@ -1413,6 +1414,106 @@ public void testConcurrentSegmentSearchSearchContext() throws IOException { .get(); } + /** + * Tests that the slice count is calculated correctly when concurrent search is enabled + * If concurrent search enabled - + * pick index level slice count setting if index level setting is set + * else pick default cluster level slice count setting + * @throws IOException + */ + public void testConcurrentSegmentSearchSliceCount() throws IOException { + + String index = randomAlphaOfLengthBetween(5, 10).toLowerCase(Locale.ROOT); + IndexService indexService = createIndex(index); + final SearchService service = getInstanceFromNode(SearchService.class); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + ShardId shardId 
= new ShardId(indexService.index(), 0); + long nowInMillis = System.currentTimeMillis(); + String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.allowPartialSearchResults(randomBoolean()); + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, + searchRequest, + shardId, + indexService.numberOfShards(), + AliasFilter.EMPTY, + 1f, + nowInMillis, + clusterAlias, + Strings.EMPTY_ARRAY + ); + // enable concurrent search + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true)) + .get(); + + Integer[][] scenarios = { + // cluster setting, index setting, expected slice count + // expected value null will pick up default value from settings + { null, null, clusterService.getClusterSettings().get(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING) }, + { 4, null, 4 }, + { null, 3, 3 }, + { 4, 3, 3 }, }; + + for (Integer[] sliceCounts : scenarios) { + Integer clusterSliceCount = sliceCounts[0]; + Integer indexSliceCount = sliceCounts[1]; + Integer targetSliceCount = sliceCounts[2]; + + if (clusterSliceCount != null) { + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder() + .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), clusterSliceCount) + ) + .get(); + } else { + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().putNull(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey()) + ) + .get(); + } + if (indexSliceCount != null) { + client().admin() + .indices() + .prepareUpdateSettings(index) + .setSettings( + Settings.builder().put(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_MAX_SLICE_COUNT.getKey(), indexSliceCount) + ) + .get(); + } else { + client().admin() + .indices() + .prepareUpdateSettings(index) + .setSettings(Settings.builder().putNull(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_MAX_SLICE_COUNT.getKey())) + .get(); + } + + try (DefaultSearchContext searchContext = service.createSearchContext(request, new TimeValue(System.currentTimeMillis()))) { + searchContext.evaluateRequestShouldUseConcurrentSearch(); + assertEquals(targetSliceCount.intValue(), searchContext.getTargetMaxSliceCount()); + } + } + // cleanup + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder() + .putNull(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()) + .putNull(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey()) + ) + .get(); + } + /** * Test that the Search Context for concurrent segment search enabled is set correctly at the time of construction. * The same is used throughout the context object lifetime even if cluster setting changes before the request completion. 
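The scenario table in testConcurrentSegmentSearchSliceCount above boils down to a simple precedence rule: an index-level slice count, when explicitly set, overrides the cluster-level one. A minimal sketch of that rule follows; the helper name is hypothetical, since SearchService derives the value from its settings objects rather than exposing a method like this.

public final class SliceCountResolutionSketch {
    /** Index-level slice count wins when explicitly set; otherwise the cluster-level value applies. */
    static int resolveTargetMaxSliceCount(Integer indexLevelValue, int clusterLevelValue) {
        return indexLevelValue != null ? indexLevelValue : clusterLevelValue;
    }

    public static void main(String[] args) {
        // Mirrors the { cluster, index, expected } scenarios { 4, null, 4 }, { null, 3, 3 } and { 4, 3, 3 }.
        System.out.println(resolveTargetMaxSliceCount(null, 4)); // 4
        System.out.println(resolveTargetMaxSliceCount(3, 4)); // 3
        System.out.println(resolveTargetMaxSliceCount(3, 3)); // 3
    }
}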
diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java similarity index 58% rename from server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java rename to server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java index f223648de6ef4..2356e2dc6d855 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/IncludeExcludeTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.opensearch.search.aggregations.support; +package org.opensearch.search.aggregations.bucket.terms; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.SortedSetDocValues; @@ -44,13 +44,14 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; import org.opensearch.search.DocValueFormat; -import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude.OrdinalsFilter; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; import java.util.Collections; +import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.atomic.LongAdder; public class IncludeExcludeTests extends OpenSearchTestCase { @@ -297,4 +298,251 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep } } + private static BytesRef[] toBytesRefArray(String... values) { + BytesRef[] bytesRefs = new BytesRef[values.length]; + for (int i = 0; i < values.length; i++) { + bytesRefs[i] = new BytesRef(values[i]); + } + return bytesRefs; + } + + public void testPrefixOrds() throws IOException { + IncludeExclude includeExclude = new IncludeExclude("(color|length|size):.*", "color:g.*"); + + OrdinalsFilter ordinalsFilter = includeExclude.convertToOrdinalsFilter(DocValueFormat.RAW); + + // Which of the following match the filter or not? 
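+        // Illustrative note: the include regex "(color|length|size):.*" expands to the finite
+        // prefix set {"color:", "length:", "size:"} and the exclude regex "color:g.*" to
+        // {"color:g"}, so acceptance reduces to per-prefix global-ordinal range checks
+        // rather than a regex evaluation of every term.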
+        BytesRef[] bytesRefs = toBytesRefArray(
+            "color:blue", // true
+            "color:crimson", // true
+            "color:green", // false (excluded)
+            "color:gray", // false (excluded)
+            "depth:10in", // false
+            "depth:12in", // false
+            "depth:17in", // false
+            "length:long", // true
+            "length:medium", // true
+            "length:short", // true
+            "material:cotton", // false
+            "material:linen", // false
+            "material:polyester", // false
+            "size:large", // true
+            "size:medium", // true
+            "size:small", // true
+            "width:13in", // false
+            "width:27in", // false
+            "width:55in" // false
+        );
+        boolean[] expectedFilter = new boolean[] {
+            true,
+            true,
+            false,
+            false,
+            false,
+            false,
+            false,
+            true,
+            true,
+            true,
+            false,
+            false,
+            false,
+            true,
+            true,
+            true,
+            false,
+            false,
+            false };
+
+        LongAdder lookupCount = new LongAdder();
+        SortedSetDocValues sortedSetDocValues = new AbstractSortedSetDocValues() {
+            @Override
+            public boolean advanceExact(int target) {
+                return false;
+            }
+
+            @Override
+            public long nextOrd() {
+                return 0;
+            }
+
+            @Override
+            public int docValueCount() {
+                return 1;
+            }
+
+            @Override
+            public BytesRef lookupOrd(long ord) {
+                lookupCount.increment();
+                int ordIndex = Math.toIntExact(ord);
+                return bytesRefs[ordIndex];
+            }
+
+            @Override
+            public long getValueCount() {
+                return bytesRefs.length;
+            }
+        };
+
+        LongBitSet acceptedOrds = ordinalsFilter.acceptedGlobalOrdinals(sortedSetDocValues);
+        long prefixLookupCount = lookupCount.longValue();
+        assertEquals(expectedFilter.length, acceptedOrds.length());
+        for (int i = 0; i < expectedFilter.length; i++) {
+            assertEquals(expectedFilter[i], acceptedOrds.get(i));
+        }
+
+        // Now repeat, but this time the prefix optimization won't work (because of the .+ on the exclude filter)
+        includeExclude = new IncludeExclude("(color|length|size):.*", "color:g.+");
+        ordinalsFilter = includeExclude.convertToOrdinalsFilter(DocValueFormat.RAW);
+        acceptedOrds = ordinalsFilter.acceptedGlobalOrdinals(sortedSetDocValues);
+        long regexpLookupCount = lookupCount.longValue() - prefixLookupCount;
+
+        // The filter should be functionally the same
+        assertEquals(expectedFilter.length, acceptedOrds.length());
+        for (int i = 0; i < expectedFilter.length; i++) {
+            assertEquals(expectedFilter[i], acceptedOrds.get(i));
+        }
+
+        // But the full regexp requires more lookups
+        assertTrue(regexpLookupCount > prefixLookupCount);
+    }
+
+    public void testExtractPrefixes() {
+        // Positive tests
+        assertEquals(bytesRefs("a"), IncludeExclude.extractPrefixes("a.*", 1000));
+        assertEquals(bytesRefs("a", "b"), IncludeExclude.extractPrefixes("(a|b).*", 1000));
+        assertEquals(bytesRefs("ab"), IncludeExclude.extractPrefixes("a(b).*", 1000));
+        assertEquals(bytesRefs("ab", "ac"), IncludeExclude.extractPrefixes("a(b|c).*", 1000));
+        assertEquals(bytesRefs("aabb", "aacc"), IncludeExclude.extractPrefixes("aa(bb|cc).*", 1000));
+
+        // No regex means no prefixes
+        assertEquals(bytesRefs(), IncludeExclude.extractPrefixes(null, 1000));
+
+        // Negative tests
+        assertNull(IncludeExclude.extractPrefixes(".*", 1000)); // Literal match-all has no prefixes
+        assertNull(IncludeExclude.extractPrefixes("ab?.*", 1000)); // Don't support optionals yet
+        // The following cover various cases involving infinite possible prefixes
+        assertNull(IncludeExclude.extractPrefixes("ab*.*", 1000));
+        assertNull(IncludeExclude.extractPrefixes("a*(b|c).*", 1000));
+        assertNull(IncludeExclude.extractPrefixes("a(b*|c)d.*", 1000));
+        assertNull(IncludeExclude.extractPrefixes("a(b|c*)d.*", 1000));
assertNull(IncludeExclude.extractPrefixes("a(b|c*)d*.*", 1000)); + + // Test with too many possible prefixes -- 10 * 10 * 10 + 1 > 1000 + assertNull( + IncludeExclude.extractPrefixes( + "((a1|a2|a3|a4|a5|a6|a7|a8|a9|a10)" + "(b1|b2|b3|b4|b5|b6|b7|b8|b9|b10)" + "(c1|c2|c3|c4|c5|c6|c7|c8|c9|c10)|x).*", + 1000 + ) + ); + // Test with too many possible prefixes -- 10 * 10 * 11 > 1000 + assertNull( + IncludeExclude.extractPrefixes( + "((a1|a2|a3|a4|a5|a6|a7|a8|a9|a10)" + "(b1|b2|b3|b4|b5|b6|b7|b8|b9|b10)" + "(c1|c2|c3|c4|c5|c6|c7|c8|c9|c10|c11)).*", + 1000 + ) + ); + } + + private static SortedSet bytesRefs(String... strings) { + SortedSet bytesRefs = new TreeSet<>(); + for (String string : strings) { + bytesRefs.add(new BytesRef(string)); + } + return bytesRefs; + } + + private static SortedSetDocValues toDocValues(BytesRef[] bytesRefs) { + return new AbstractSortedSetDocValues() { + @Override + public boolean advanceExact(int target) { + return false; + } + + @Override + public long nextOrd() { + return 0; + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public BytesRef lookupOrd(long ord) { + int ordIndex = Math.toIntExact(ord); + return bytesRefs[ordIndex]; + } + + @Override + public long getValueCount() { + return bytesRefs.length; + } + }; + } + + public void testOnlyIncludeExcludePrefix() throws IOException { + String incExcString = "(color:g|width:).*"; + IncludeExclude excludeOnly = new IncludeExclude(null, incExcString); + + OrdinalsFilter ordinalsFilter = excludeOnly.convertToOrdinalsFilter(DocValueFormat.RAW); + // Which of the following match the filter or not? + BytesRef[] bytesRefs = toBytesRefArray( + "color:blue", // true + "color:crimson", // true + "color:green", // false (excluded) + "color:gray", // false (excluded) + "depth:10in", // true + "depth:12in", // true + "depth:17in", // true + "length:long", // true + "length:medium", // true + "length:short", // true + "material:cotton", // true + "material:linen", // true + "material:polyester", // true + "size:large", // true + "size:medium", // true + "size:small", // true + "width:13in", // false + "width:27in", // false + "width:55in" // false + ); + boolean[] expectedFilter = new boolean[] { + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false }; + LongBitSet longBitSet = ordinalsFilter.acceptedGlobalOrdinals(toDocValues(bytesRefs)); + + for (int i = 0; i < expectedFilter.length; i++) { + assertEquals(expectedFilter[i], longBitSet.get(i)); + } + + // Now repeat, but this time include only. + IncludeExclude includeOnly = new IncludeExclude(incExcString, null); + ordinalsFilter = includeOnly.convertToOrdinalsFilter(DocValueFormat.RAW); + longBitSet = ordinalsFilter.acceptedGlobalOrdinals(toDocValues(bytesRefs)); + + // The previously excluded ordinals should be included and vice versa. 
+        for (int i = 0; i < expectedFilter.length; i++) {
+            assertEquals(!expectedFilter[i], longBitSet.get(i));
+        }
+    }
 }
diff --git a/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java b/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java
new file mode 100644
index 0000000000000..0f183555781d3
--- /dev/null
+++ b/server/src/test/java/org/opensearch/wlm/WorkloadManagementSettingsTests.java
@@ -0,0 +1,241 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.wlm;
+
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.test.OpenSearchTestCase;
+
+import static org.opensearch.wlm.WorkloadManagementSettings.NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME;
+import static org.opensearch.wlm.WorkloadManagementSettings.NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME;
+import static org.opensearch.wlm.WorkloadManagementSettings.NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME;
+import static org.opensearch.wlm.WorkloadManagementSettings.NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME;
+
+public class WorkloadManagementSettingsTests extends OpenSearchTestCase {
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_rejection_threshold}:
+     * one accidentally set higher than {@code wlm.query_group.node.memory_cancellation_threshold}
+     * during new feature development. This ensures that {@link WorkloadManagementSettings} holds the
+     * invariant {@code nodeLevelRejectionThreshold < nodeLevelCancellationThreshold}.
+     */
+    public void testInvalidMemoryInstantiationOfWorkloadManagementSettings() {
+        Settings settings = Settings.builder()
+            .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.8)
+            .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.7)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+
+        assertThrows(IllegalArgumentException.class, () -> new WorkloadManagementSettings(settings, cs));
+    }
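+    // Illustrative note: the invariant is validated eagerly in the WorkloadManagementSettings
+    // constructor (as the test above shows), so a misconfigured node fails fast with an
+    // IllegalArgumentException rather than silently clamping the thresholds.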
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_rejection_threshold}:
+     * one accidentally set higher than {@code wlm.query_group.node.cpu_cancellation_threshold}
+     * during new feature development. This ensures that {@link WorkloadManagementSettings} holds the
+     * invariant {@code nodeLevelRejectionThreshold < nodeLevelCancellationThreshold}.
+     */
+    public void testInvalidCpuInstantiationOfWorkloadManagementSettings() {
+        Settings settings = Settings.builder()
+            .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8)
+            .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.7)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+
+        assertThrows(IllegalArgumentException.class, () -> new WorkloadManagementSettings(settings, cs));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_rejection_threshold},
+     * set via the setNodeLevelCpuRejectionThreshold function.
+     */
+    public void testValidNodeLevelCpuRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.7);
+        assertEquals(0.7, workloadManagementSettings.getNodeLevelCpuRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_rejection_threshold}.
+     */
+    public void testValidNodeLevelCpuRejectionThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.79).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.79, workloadManagementSettings.getNodeLevelCpuRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_rejection_threshold}:
+     * one set higher than {@literal 0.9}.
+     */
+    public void testInvalidNodeLevelCpuRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.95));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_rejection_threshold}:
+     * one set higher than {@code wlm.query_group.node.cpu_cancellation_threshold}.
+     */
+    public void testInvalidNodeLevelCpuRejectionThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuRejectionThreshold(0.85));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_cancellation_threshold}.
+     */
+    public void testValidNodeLevelCpuCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.8, workloadManagementSettings.getNodeLevelCpuCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.cpu_cancellation_threshold},
+     * set via the setNodeLevelCpuCancellationThreshold function.
+     */
+    public void testValidNodeLevelCpuCancellationThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.83);
+        assertEquals(0.83, workloadManagementSettings.getNodeLevelCpuCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_cancellation_threshold}:
+     * one set higher than {@literal 0.95}.
+     */
+    public void testInvalidNodeLevelCpuCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.96));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.cpu_cancellation_threshold}:
+     * one set lower than {@code wlm.query_group.node.cpu_rejection_threshold}.
+     */
+    public void testInvalidNodeLevelCpuCancellationThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_CPU_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_CPU_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelCpuCancellationThreshold(0.65));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_cancellation_threshold}.
+     */
+    public void testValidNodeLevelMemoryCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.8, workloadManagementSettings.getNodeLevelMemoryCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_cancellation_threshold},
+     * set via the setNodeLevelMemoryCancellationThreshold function.
+     */
+    public void testValidNodeLevelMemoryCancellationThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.8).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.83);
+        assertEquals(0.83, workloadManagementSettings.getNodeLevelMemoryCancellationThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_cancellation_threshold}:
+     * one set higher than {@literal 0.95}.
+     */
+    public void testInvalidNodeLevelMemoryCancellationThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.96));
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_cancellation_threshold}:
+     * one set lower than {@code wlm.query_group.node.memory_rejection_threshold}.
+     */
+    public void testInvalidNodeLevelMemoryCancellationThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryCancellationThreshold(0.65));
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_rejection_threshold}.
+     */
+    public void testValidNodeLevelMemoryRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.79).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertEquals(0.79, workloadManagementSettings.getNodeLevelMemoryRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests a valid value for {@code wlm.query_group.node.memory_rejection_threshold},
+     * set via the setNodeLevelMemoryRejectionThreshold function.
+     */
+    public void testValidNodeLevelMemoryRejectionThresholdCase2() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.86);
+        assertEquals(0.86, workloadManagementSettings.getNodeLevelMemoryRejectionThreshold(), 1e-9);
+    }
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_rejection_threshold}:
+     * one set higher than {@literal 0.9}.
+     */
+    public void testInvalidNodeLevelMemoryRejectionThresholdCase1() {
+        Settings settings = Settings.builder().put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.9).build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.92));
+    }
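+
+    // Illustrative note: taken together, these cases pin down the shared invariant
+    // rejectionThreshold < cancellationThreshold, with hard caps of 0.9 for the rejection
+    // thresholds and 0.95 for the cancellation thresholds; every setter that would
+    // violate it throws IllegalArgumentException.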
+
+    /**
+     * Tests an invalid value for {@code wlm.query_group.node.memory_rejection_threshold}:
+     * one set higher than {@code wlm.query_group.node.memory_cancellation_threshold}.
+     */
+    public void testInvalidNodeLevelMemoryRejectionThresholdCase2() {
+        Settings settings = Settings.builder()
+            .put(NODE_MEMORY_REJECTION_THRESHOLD_SETTING_NAME, 0.7)
+            .put(NODE_MEMORY_CANCELLATION_THRESHOLD_SETTING_NAME, 0.8)
+            .build();
+        ClusterSettings cs = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        WorkloadManagementSettings workloadManagementSettings = new WorkloadManagementSettings(settings, cs);
+        assertThrows(IllegalArgumentException.class, () -> workloadManagementSettings.setNodeLevelMemoryRejectionThreshold(0.85));
+    }
+}
diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestUtils.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestUtils.java
index abf8f2a4da6c1..50a1751546293 100644
--- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestUtils.java
+++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestUtils.java
@@ -21,6 +21,7 @@ import java.util.Map;
 
 public class IndexShardTestUtils {
+    public static final String MOCK_STATE_REPO_NAME = "state-test-repo";
     public static final String MOCK_SEGMENT_REPO_NAME = "segment-test-repo";
     public static final String MOCK_TLOG_REPO_NAME = "tlog-test-repo";
@@ -37,6 +38,7 @@ public static DiscoveryNode getFakeDiscoNode(String id) {
     public static DiscoveryNode getFakeRemoteEnabledNode(String id) {
         Map<String, String> remoteNodeAttributes = new HashMap<>();
+        remoteNodeAttributes.put(RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, MOCK_STATE_REPO_NAME);
         remoteNodeAttributes.put(RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, MOCK_SEGMENT_REPO_NAME);
         remoteNodeAttributes.put(RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, MOCK_TLOG_REPO_NAME);
         return new DiscoveryNode(
diff --git a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json
index e69c2db6ff400..18e30ec6f87d5 100644
--- a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json
+++ b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.json
@@ -19,8 +19,22 @@
             "type":"myfilter"
         },
         "dict_dec":{
-            "type":"dictionary_decompounder",
-            "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"]
+            "type":"dictionary_decompounder",
+            "word_list":["donau", "dampf", "schiff", "spargel", "creme", "suppe"]
+        },
+        "hyphen_dec":{
+            "type":"hyphenation_decompounder",
+            "word_list":["læse", "hest"],
+            "hyphenation_patterns_path": "da_UTF8.xml"
+        },
+        "hyphen_dec_no_sub_matches":{
+            "type":"hyphenation_decompounder",
+            "word_list":["basketball", "basket", "ball", "kurv"],
+            "hyphenation_patterns_path": "da_UTF8.xml",
+            "only_longest_match": false,
+            "no_sub_matches": true,
+            "no_overlapping_matches": false
+        }
     },
     "analyzer":{
@@ -45,8 +59,16 @@
             "filter":["lowercase", "stop", "czech_stem"]
         },
         "decompoundingAnalyzer":{
-            "tokenizer":"standard",
-            "filter":["dict_dec"]
+            "tokenizer":"standard",
+            "filter":["dict_dec"]
+        },
+        "hyphenationAnalyzer":{
+            "tokenizer":"standard",
+            "filter":["hyphen_dec"]
+        },
+        "hyphenationAnalyzerNoSubMatches":{
+            "tokenizer":"standard",
"filter":["hyphen_dec_no_sub_matches"] } } } diff --git a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml index 82f933296a314..30862a9d0be8e 100644 --- a/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/opensearch/analysis/common/test1.yml @@ -15,6 +15,18 @@ index : dict_dec : type : dictionary_decompounder word_list : [donau, dampf, schiff, spargel, creme, suppe] + hyphen_dec : + type : hyphenation_decompounder + word_list : [læse, hest] + hyphenation_patterns_path: "da_UTF8.xml" + hyphen_dec_no_sub_matches : + type : hyphenation_decompounder + word_list : [basketball, basket, ball, kurv] + hyphenation_patterns_path: "da_UTF8.xml" + only_longest_match: False + no_sub_matches: True + no_overlapping_matches: False + analyzer : standard : type : standard @@ -37,3 +49,9 @@ index : decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] + hyphenationAnalyzer : + tokenizer : standard + filter : [hyphen_dec] + hyphenationAnalyzerNoSubMatches : + tokenizer : standard + filter : [hyphen_dec_no_sub_matches]