
Commit

… into feature/pit_stats
Ajay Kumar Movva committed Aug 2, 2022
2 parents f93ddc5 + fcdea3a commit f46f7eb
Showing 397 changed files with 4,149 additions and 1,185 deletions.
2 changes: 1 addition & 1 deletion DEVELOPER_GUIDE.md
@@ -460,7 +460,7 @@ Work to make sure that OpenSearch can scale in a distributed manner.

Includes:

- Nodes (Master, Data, Compute, Ingest, Discovery, etc.)
- Nodes (Cluster Manager, Data, Compute, Ingest, Discovery, etc.)
- Replication & Merge Policies (Document, Segment level)
- Snapshot/Restore (repositories; S3, Azure, GCP, NFS)
- Translog (e.g., OpenSearch, Kafka, Kinesis)
2 changes: 1 addition & 1 deletion TESTING.md
@@ -395,7 +395,7 @@ The branch needs to be available on the remote that the BWC makes of the reposit

Example:

Say you need to make a change to `master` and have a BWC layer in `5.x`. You will need to: . Create a branch called `index_req_change` off your remote `${remote}`. This will contain your change. . Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer. . Push both branches to your remote repository. . Run the tests with `./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x`.
Say you need to make a change to `main` and have a BWC layer in `5.x`. You will need to: . Create a branch called `index_req_change` off your remote `${remote}`. This will contain your change. . Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer. . Push both branches to your remote repository. . Run the tests with `./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x`.
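
Sketched as shell commands, a minimal illustration of the workflow described above, assuming `${remote}` is a git remote pointing at your fork and reusing the hypothetical branch names from the example:

```sh
# Create the branch that will hold your change, off the remote's main branch.
git checkout -b index_req_change ${remote}/main

# Create the branch that will hold your BWC layer, off 5.x.
git checkout -b index_req_bwc_5.x ${remote}/5.x

# Push both branches to your remote repository.
git push ${remote} index_req_change index_req_bwc_5.x

# Run the tests against the BWC branch.
./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x
```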

### Skip fetching latest

2 changes: 1 addition & 1 deletion build.gradle
@@ -55,7 +55,7 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
id "com.diffplug.spotless" version "6.8.0" apply false
id "com.diffplug.spotless" version "6.9.0" apply false
id "org.gradle.test-retry" version "1.4.0" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
2 changes: 2 additions & 0 deletions buildSrc/build.gradle
@@ -112,6 +112,8 @@ dependencies {
api 'commons-io:commons-io:2.7'
api "net.java.dev.jna:jna:5.11.0"
api 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2'
api 'org.jdom:jdom2:2.0.6.1'
api 'org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.7.10'
api 'de.thetaphi:forbiddenapis:3.3'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.15.2'
api 'org.apache.maven:maven-model:3.6.2'
4 changes: 2 additions & 2 deletions buildSrc/version.properties
@@ -1,5 +1,5 @@
opensearch = 3.0.0
lucene = 9.3.0-snapshot-b7231bb
lucene = 9.3.0

bundled_jdk_vendor = adoptium
bundled_jdk = 17.0.3+7
@@ -32,7 +32,7 @@ commonslogging = 1.2
commonscodec = 1.13

# plugin dependencies
aws = 1.12.247
aws = 1.12.270

# when updating this version, you need to ensure compatibility with:
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)
2 changes: 1 addition & 1 deletion modules/aggs-matrix-stats/build.gradle
@@ -31,7 +31,7 @@ apply plugin: 'opensearch.yaml-rest-test'

opensearchplugin {
description 'Adds aggregations whose input are a list of numeric fields and output includes a matrix.'
classname 'org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin'
classname 'org.opensearch.search.aggregations.matrix.MatrixAggregationModulePlugin'
hasClientJar = true
}

@@ -42,7 +42,7 @@

import static java.util.Collections.singletonList;

public class MatrixAggregationPlugin extends Plugin implements SearchPlugin {
public class MatrixAggregationModulePlugin extends Plugin implements SearchPlugin {
@Override
public List<AggregationSpec> getAggregations() {
return singletonList(
@@ -43,7 +43,7 @@
import org.opensearch.search.aggregations.Aggregation;
import org.opensearch.search.aggregations.InternalAggregation;
import org.opensearch.search.aggregations.ParsedAggregation;
import org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin;
import org.opensearch.search.aggregations.matrix.MatrixAggregationModulePlugin;
import org.opensearch.search.aggregations.matrix.stats.InternalMatrixStats.Fields;
import org.opensearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.opensearch.test.InternalAggregationTestCase;
@@ -64,7 +64,7 @@ public class InternalMatrixStatsTests extends InternalAggregationTestCase<Intern

@Override
protected SearchPlugin registerPlugin() {
return new MatrixAggregationPlugin();
return new MatrixAggregationModulePlugin();
}

@Override
@@ -45,7 +45,7 @@
import org.opensearch.index.mapper.NumberFieldMapper;
import org.opensearch.plugins.SearchPlugin;
import org.opensearch.search.aggregations.AggregatorTestCase;
import org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin;
import org.opensearch.search.aggregations.matrix.MatrixAggregationModulePlugin;

import java.util.Arrays;
import java.util.Collections;
@@ -128,6 +128,6 @@ public void testTwoFields() throws Exception {

@Override
protected List<SearchPlugin> getSearchPlugins() {
return Collections.singletonList(new MatrixAggregationPlugin());
return Collections.singletonList(new MatrixAggregationModulePlugin());
}
}
2 changes: 1 addition & 1 deletion modules/analysis-common/build.gradle
@@ -32,7 +32,7 @@ apply plugin: 'opensearch.internal-cluster-test'

opensearchplugin {
description 'Adds "built in" analyzers to OpenSearch.'
classname 'org.opensearch.analysis.common.CommonAnalysisPlugin'
classname 'org.opensearch.analysis.common.CommonAnalysisModulePlugin'
extendedPlugins = ['lang-painless']
}

@@ -48,7 +48,7 @@
public class QueryStringWithAnalyzersIT extends OpenSearchIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(CommonAnalysisPlugin.class);
return Arrays.asList(CommonAnalysisModulePlugin.class);
}

/**
@@ -167,9 +167,9 @@

import static org.opensearch.plugins.AnalysisPlugin.requiresAnalysisSettings;

public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {
public class CommonAnalysisModulePlugin extends Plugin implements AnalysisPlugin, ScriptPlugin {

private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisPlugin.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(CommonAnalysisModulePlugin.class);

private final SetOnce<ScriptService> scriptService = new SetOnce<>();

@@ -51,7 +51,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
@@ -68,7 +68,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_ascii_folding.type", "asciifolding")
.put("index.analysis.filter.my_ascii_folding.preserve_original", true)
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_ascii_folding");
String source = "Ansprüche";
@@ -60,7 +60,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_word_delimiter.type", type)
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -78,7 +78,7 @@ public void testCatenateWords() throws IOException {
.put("index.analysis.filter.my_word_delimiter.catenate_words", "true")
.put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -96,7 +96,7 @@ public void testCatenateNumbers() throws IOException {
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -115,7 +115,7 @@ public void testCatenateAll() throws IOException {
.put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false")
.put("index.analysis.filter.my_word_delimiter.catenate_all", "true")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -132,7 +132,7 @@ public void testSplitOnCaseChange() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot";
@@ -149,7 +149,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.preserve_original", "true")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -186,7 +186,7 @@ public void testStemEnglishPossessive() throws IOException {
.put("index.analysis.filter.my_word_delimiter.type", type)
.put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_word_delimiter");
String source = "PowerShot 500-42 wi-fi wi-fi-4000 j2se O'Neil's";
@@ -52,7 +52,7 @@ public class CJKFilterFactoryTests extends OpenSearchTokenStreamTestCase {

@Before
public void setup() throws IOException {
analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE, new CommonAnalysisPlugin());
analysis = AnalysisTestsHelper.createTestAnalysisFromClassPath(createTempDir(), RESOURCE, new CommonAnalysisModulePlugin());
}

public void testDefault() throws IOException {
@@ -50,7 +50,7 @@

public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
public CommonAnalysisFactoryTests() {
super(new CommonAnalysisPlugin());
super(new CommonAnalysisModulePlugin());
}

@Override
@@ -58,7 +58,7 @@ public void testDefault() throws IOException {
.build();

try {
AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
Assert.fail("[common_words] or [common_words_path] is set");
} catch (IllegalArgumentException e) {} catch (IOException e) {
fail("expected IAE");
@@ -333,7 +333,7 @@ private Path createHome() throws IOException {
}

private static OpenSearchTestCase.TestAnalysis createTestAnalysisFromSettings(Settings settings) throws IOException {
return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin());
}

}
@@ -102,8 +102,8 @@ private List<String> analyze(Settings settings, String analyzerName, String text
}

private AnalysisModule createAnalysisModule(Settings settings) throws IOException {
CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin();
return new AnalysisModule(TestEnvironment.newEnvironment(settings), Arrays.asList(commonAnalysisPlugin, new AnalysisPlugin() {
CommonAnalysisModulePlugin commonAnalysisModulePlugin = new CommonAnalysisModulePlugin();
return new AnalysisModule(TestEnvironment.newEnvironment(settings), Arrays.asList(commonAnalysisModulePlugin, new AnalysisPlugin() {
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("myfilter", MyFilterTokenFilterFactory::new);
@@ -29,7 +29,7 @@ public class ConcatenateGraphTokenFilterFactoryTests extends OpenSearchTokenStre
public void testSimpleTokenizerAndConcatenate() throws IOException {
OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

TokenFilterFactory tokenFilter = analysis.tokenFilter.get("concatenate_graph");
@@ -47,7 +47,7 @@ public void testTokenizerCustomizedSeparator() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -65,7 +65,7 @@ public void testTokenizerEmptySeparator() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -83,7 +83,7 @@ public void testPreservePositionIncrementsDefault() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -106,7 +106,7 @@ public void testPreservePositionIncrementsTrue() throws IOException {
.put("index.analysis.filter.my_concatenate_graph.token_separator", "+")
.put("index.analysis.filter.my_concatenate_graph.preserve_position_increments", "true")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_concatenate_graph");
@@ -132,7 +132,7 @@ public void testGraph() throws IOException {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
.put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

String source = "PowerShot Is AweSome";
@@ -166,7 +166,7 @@ public void testInvalidSeparator() {
.put("index.analysis.filter.my_concatenate_graph.type", "concatenate_graph")
.put("index.analysis.filter.my_concatenate_graph.token_separator", "11")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
)
);
}
@@ -187,7 +187,7 @@ public void testMaxGraphExpansion() throws IOException {
.put("index.analysis.analyzer.my_analyzer.tokenizer", "whitespace")
.put("index.analysis.analyzer.my_analyzer.filter", "my_word_delimiter, my_concatenate_graph")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);

String source = "PowerShot Is AweSome";
@@ -74,7 +74,7 @@ public class DisableGraphQueryTests extends OpenSearchSingleNodeTestCase {

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singleton(CommonAnalysisPlugin.class);
return Collections.singleton(CommonAnalysisModulePlugin.class);
}

@Before
@@ -52,7 +52,7 @@ public void testDefault() throws IOException {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.my_edge_ngram.type", "edge_ngram")
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_edge_ngram");
String source = "foo";
@@ -69,7 +69,7 @@ public void testPreserveOriginal() throws IOException {
.put("index.analysis.filter.my_edge_ngram.type", "edge_ngram")
.put("index.analysis.filter.my_edge_ngram.preserve_original", true)
.build(),
new CommonAnalysisPlugin()
new CommonAnalysisModulePlugin()
);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_edge_ngram");
String source = "foo";
@@ -60,7 +60,7 @@ private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws
.put("index.analysis.analyzer.my_analyzer.tokenizer", tokenizer)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisPlugin()))
return new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(new CommonAnalysisModulePlugin()))
.getAnalysisRegistry()
.build(idxSettings);
}
@@ -49,7 +49,7 @@ public void testElisionFilterWithNoArticles() throws IOException {

IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())
() -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisModulePlugin())
);

assertEquals("elision filter requires [articles] or [articles_path] setting", e.getMessage());
@@ -65,7 +65,7 @@
public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(CommonAnalysisPlugin.class);
return Arrays.asList(CommonAnalysisModulePlugin.class);
}

public void testNgramHighlightingWithBrokenPositions() throws IOException {
