diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index afbfc747541d7..cd6c7c36ee696 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -147,7 +147,7 @@ class ClusterConfiguration { // map from destination path, to source file Map extraConfigFiles = new HashMap<>() - LinkedHashMap plugins = new LinkedHashMap<>() + LinkedHashMap plugins = new LinkedHashMap<>() List modules = new ArrayList<>() @@ -185,6 +185,11 @@ class ClusterConfiguration { plugins.put(pluginProject.name, pluginProject) } + @Input + void mavenPlugin(String name, String mavenCoords) { + plugins.put(name, mavenCoords) + } + /** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */ @Input void module(Project moduleProject) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 966b5801cc800..70c1cf84ed119 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -99,8 +99,8 @@ class ClusterFormationTasks { // from mirrors using gradles built-in mechanism etc. 
configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion) - for (Map.Entry entry : config.plugins.entrySet()) { - configureBwcPluginDependency("${prefix}_elasticsearchBwcPlugins", project, entry.getValue(), bwcPlugins, config.bwcVersion) + for (Map.Entry entry : config.plugins.entrySet()) { + configureBwcPluginDependency(project, entry.getValue(), bwcPlugins, config.bwcVersion) } bwcDistro.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS) bwcPlugins.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS) @@ -150,10 +150,15 @@ class ClusterFormationTasks { } /** Adds a dependency on a different version of the given plugin, which will be retrieved using gradle's dependency resolution */ - static void configureBwcPluginDependency(String name, Project project, Project pluginProject, Configuration configuration, Version elasticsearchVersion) { - verifyProjectHasBuildPlugin(name, elasticsearchVersion, project, pluginProject) - final String pluginName = findPluginName(pluginProject) - project.dependencies.add(configuration.name, "org.elasticsearch.plugin:${pluginName}:${elasticsearchVersion}@zip") + static void configureBwcPluginDependency(Project project, Object plugin, Configuration configuration, Version elasticsearchVersion) { + if (plugin instanceof Project) { + Project pluginProject = (Project)plugin + verifyProjectHasBuildPlugin(configuration.name, elasticsearchVersion, project, pluginProject) + final String pluginName = findPluginName(pluginProject) + project.dependencies.add(configuration.name, "org.elasticsearch.plugin:${pluginName}:${elasticsearchVersion}@zip") + } else { + project.dependencies.add(configuration.name, "${plugin}@zip") + } } /** @@ -210,9 +215,9 @@ class ClusterFormationTasks { } // install plugins - for (Map.Entry plugin : node.config.plugins.entrySet()) { - String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin') - setup = configureInstallPluginTask(taskName(prefix, node, 
actionName), project, setup, node, plugin.getValue(), prefix) + for (String pluginName : node.config.plugins.keySet()) { + String actionName = pluginTaskName('install', pluginName, 'Plugin') + setup = configureInstallPluginTask(taskName(prefix, node, actionName), project, setup, node, pluginName, prefix) } // sets up any extra config files that need to be copied over to the ES instance; @@ -448,31 +453,40 @@ class ClusterFormationTasks { Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup) List pluginFiles = [] - for (Map.Entry plugin : node.config.plugins.entrySet()) { + for (Map.Entry plugin : node.config.plugins.entrySet()) { - Project pluginProject = plugin.getValue() - verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject) - String configurationName = pluginConfigurationName(prefix, pluginProject) + String configurationName = pluginConfigurationName(prefix, plugin.key) Configuration configuration = project.configurations.findByName(configurationName) if (configuration == null) { configuration = project.configurations.create(configurationName) } - project.dependencies.add(configurationName, project.dependencies.project(path: pluginProject.path, configuration: 'zip')) - setup.dependsOn(pluginProject.tasks.bundlePlugin) - - // also allow rest tests to use the rest spec from the plugin - String copyRestSpecTaskName = pluginTaskName('copy', plugin.getKey(), 'PluginRestSpec') - Copy copyRestSpec = project.tasks.findByName(copyRestSpecTaskName) - for (File resourceDir : pluginProject.sourceSets.test.resources.srcDirs) { - File restApiDir = new File(resourceDir, 'rest-api-spec/api') - if (restApiDir.exists() == false) continue - if (copyRestSpec == null) { - copyRestSpec = project.tasks.create(name: copyRestSpecTaskName, type: Copy) - copyPlugins.dependsOn(copyRestSpec) - copyRestSpec.into(project.sourceSets.test.output.resourcesDir) + + if (plugin.getValue() instanceof Project) { + Project pluginProject = 
plugin.getValue() + verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject) + + project.dependencies.add(configurationName, project.dependencies.project(path: pluginProject.path, configuration: 'zip')) + setup.dependsOn(pluginProject.tasks.bundlePlugin) + + // also allow rest tests to use the rest spec from the plugin + String copyRestSpecTaskName = pluginTaskName('copy', plugin.getKey(), 'PluginRestSpec') + Copy copyRestSpec = project.tasks.findByName(copyRestSpecTaskName) + for (File resourceDir : pluginProject.sourceSets.test.resources.srcDirs) { + File restApiDir = new File(resourceDir, 'rest-api-spec/api') + if (restApiDir.exists() == false) continue + if (copyRestSpec == null) { + copyRestSpec = project.tasks.create(name: copyRestSpecTaskName, type: Copy) + copyPlugins.dependsOn(copyRestSpec) + copyRestSpec.into(project.sourceSets.test.output.resourcesDir) + } + copyRestSpec.from(resourceDir).include('rest-api-spec/api/**') } - copyRestSpec.from(resourceDir).include('rest-api-spec/api/**') + } else { + project.dependencies.add(configurationName, "${plugin.getValue()}@zip") } + + + pluginFiles.add(configuration) } @@ -481,32 +495,37 @@ class ClusterFormationTasks { return copyPlugins } - private static String pluginConfigurationName(final String prefix, final Project project) { - return "_plugin_${prefix}_${project.path}".replace(':', '_') + private static String pluginConfigurationName(final String prefix, final String name) { + return "_plugin_${prefix}_${name}".replace(':', '_') } - private static String pluginBwcConfigurationName(final String prefix, final Project project) { - return "_plugin_bwc_${prefix}_${project.path}".replace(':', '_') + private static String pluginBwcConfigurationName(final String prefix, final String name) { + return "_plugin_bwc_${prefix}_${name}".replace(':', '_') } /** Configures task to copy a plugin based on a zip file resolved using dependencies for an older version */ static Task 
configureCopyBwcPluginsTask(String name, Project project, Task setup, NodeInfo node, String prefix) { Configuration bwcPlugins = project.configurations.getByName("${prefix}_elasticsearchBwcPlugins") - for (Map.Entry plugin : node.config.plugins.entrySet()) { - Project pluginProject = plugin.getValue() - verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject) - String configurationName = pluginBwcConfigurationName(prefix, pluginProject) + for (Map.Entry plugin : node.config.plugins.entrySet()) { + String configurationName = pluginBwcConfigurationName(prefix, plugin.key) Configuration configuration = project.configurations.findByName(configurationName) if (configuration == null) { configuration = project.configurations.create(configurationName) } - final String depName = findPluginName(pluginProject) + if (plugin.getValue() instanceof Project) { + Project pluginProject = plugin.getValue() + verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject) - Dependency dep = bwcPlugins.dependencies.find { - it.name == depName + final String depName = findPluginName(pluginProject) + + Dependency dep = bwcPlugins.dependencies.find { + it.name == depName + } + configuration.dependencies.add(dep) + } else { + project.dependencies.add(configurationName, "${plugin.getValue()}@zip") } - configuration.dependencies.add(dep) } Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup) { @@ -531,12 +550,12 @@ class ClusterFormationTasks { return installModule } - static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Project plugin, String prefix) { + static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, String pluginName, String prefix) { final FileCollection pluginZip; if (node.nodeVersion != VersionProperties.elasticsearch) { - pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, plugin)) + pluginZip = 
project.configurations.getByName(pluginBwcConfigurationName(prefix, pluginName)) } else { - pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, plugin)) + pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, pluginName)) } // delay reading the file location until execution time by wrapping in a closure within a GString final Object file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}" diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java index 9210526e7c81c..d32c37dc2c44f 100644 --- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java @@ -18,27 +18,19 @@ */ package org.elasticsearch.client.benchmark.rest; -import org.apache.http.HttpEntity; import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; import org.apache.http.HttpStatus; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.conn.ConnectionKeepAliveStrategy; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.benchmark.AbstractBenchmark; import org.elasticsearch.client.benchmark.ops.bulk.BulkRequestExecutor; import org.elasticsearch.client.benchmark.ops.search.SearchRequestExecutor; import java.io.IOException; -import 
java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -86,9 +78,10 @@ public boolean bulkIndex(List bulkData) { bulkRequestBody.append(bulkItem); bulkRequestBody.append("\n"); } - HttpEntity entity = new NStringEntity(bulkRequestBody.toString(), ContentType.APPLICATION_JSON); + Request request = new Request("POST", "/geonames/type/_noop_bulk"); + request.setJsonEntity(bulkRequestBody.toString()); try { - Response response = client.performRequest("POST", "/geonames/type/_noop_bulk", Collections.emptyMap(), entity); + Response response = client.performRequest(request); return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; } catch (Exception e) { throw new ElasticsearchException(e); @@ -107,9 +100,10 @@ private RestSearchRequestExecutor(RestClient client, String indexName) { @Override public boolean search(String source) { - HttpEntity searchBody = new NStringEntity(source, StandardCharsets.UTF_8); + Request request = new Request("GET", endpoint); + request.setJsonEntity(source); try { - Response response = client.performRequest("GET", endpoint, Collections.emptyMap(), searchBody); + Response response = client.performRequest(request); return response.getStatusLine().getStatusCode() == HttpStatus.SC_OK; } catch (IOException e) { throw new ElasticsearchException(e); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 7f59fcc831213..9782b1016b421 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -194,18 +194,16 @@ public void testBulkProcessorWaitOnClose() throws Exception { } public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception { - - String createIndexBody = "{\n" + + Request request = new 
Request("PUT", "/test-ro"); + request.setJsonEntity("{\n" + " \"settings\" : {\n" + " \"index\" : {\n" + " \"blocks.write\" : true\n" + " }\n" + " }\n" + " \n" + - "}"; - - NStringEntity entity = new NStringEntity(createIndexBody, ContentType.APPLICATION_JSON); - Response response = client().performRequest("PUT", "/test-ro", Collections.emptyMap(), entity); + "}"); + Response response = client().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); int bulkActions = randomIntBetween(10, 100); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 352a6a5e61d1b..8595bd16b63be 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -19,9 +19,6 @@ package org.elasticsearch.client; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.DocWriteRequest; @@ -39,6 +36,7 @@ import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.Strings; @@ -147,11 +145,10 @@ public void testExists() throws IOException { GetRequest getRequest = new GetRequest("index", "type", "id"); assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); } - String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; - StringEntity stringEntity = new StringEntity(document, 
ContentType.APPLICATION_JSON); - Response response = client().performRequest(HttpPut.METHOD_NAME, "/index/type/id", Collections.singletonMap("refresh", "wait_for"), - stringEntity); - assertEquals(201, response.getStatusLine().getStatusCode()); + IndexRequest index = new IndexRequest("index", "type", "id"); + index.source("{\"field1\":\"value1\",\"field2\":\"value2\"}", XContentType.JSON); + index.setRefreshPolicy(RefreshPolicy.IMMEDIATE); + highLevelClient().index(index); { GetRequest getRequest = new GetRequest("index", "type", "id"); assertTrue(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync)); @@ -175,12 +172,11 @@ public void testGet() throws IOException { assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } - + IndexRequest index = new IndexRequest("index", "type", "id"); String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}"; - StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); - Response response = client().performRequest(HttpPut.METHOD_NAME, "/index/type/id", Collections.singletonMap("refresh", "wait_for"), - stringEntity); - assertEquals(201, response.getStatusLine().getStatusCode()); + index.source(document, XContentType.JSON); + index.setRefreshPolicy(RefreshPolicy.IMMEDIATE); + highLevelClient().index(index); { GetRequest getRequest = new GetRequest("index", "type", "id").version(2); ElasticsearchException exception = expectThrows(ElasticsearchException.class, @@ -271,18 +267,15 @@ public void testMultiGet() throws IOException { assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", response.getResponses()[1].getFailure().getFailure().getMessage()); } - - String document = "{\"field\":\"value1\"}"; - StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); - Response r = 
client().performRequest(HttpPut.METHOD_NAME, "/index/type/id1", Collections.singletonMap("refresh", "true"), - stringEntity); - assertEquals(201, r.getStatusLine().getStatusCode()); - - document = "{\"field\":\"value2\"}"; - stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON); - r = client().performRequest(HttpPut.METHOD_NAME, "/index/type/id2", Collections.singletonMap("refresh", "true"), stringEntity); - assertEquals(201, r.getStatusLine().getStatusCode()); - + BulkRequest bulk = new BulkRequest(); + bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE); + IndexRequest index = new IndexRequest("index", "type", "id1"); + index.source("{\"field\":\"value1\"}", XContentType.JSON); + bulk.add(index); + index = new IndexRequest("index", "type", "id2"); + index.source("{\"field\":\"value2\"}", XContentType.JSON); + bulk.add(index); + highLevelClient().bulk(bulk); { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 09a3fbd4d16a8..de7fdf3a2a23d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.client.documentation; -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; @@ -49,6 +47,7 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import 
org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.Strings; @@ -58,6 +57,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.rest.RestStatus; @@ -274,16 +274,15 @@ public void testUpdate() throws Exception { IndexResponse indexResponse = client.index(indexRequest); assertSame(indexResponse.status(), RestStatus.CREATED); - XContentType xContentType = XContentType.JSON; - String script = Strings.toString(XContentBuilder.builder(xContentType.xContent()) + Request request = new Request("POST", "/_scripts/increment-field"); + request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder() .startObject() .startObject("script") .field("lang", "painless") .field("code", "ctx._source.field += params.count") .endObject() - .endObject()); - HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType())); - Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body); + .endObject())); + Response response = client().performRequest(request); assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); } { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java index fe9e8108acee3..e1e08f120a2c9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationDocumentationIT.java @@ -26,6 +26,7 @@ import 
org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.apache.http.HttpEntity; @@ -66,58 +67,22 @@ * -------------------------------------------------- */ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase { - - public void testCreateIndex() throws IOException { - RestHighLevelClient client = highLevelClient(); - { - //tag::migration-create-index - Settings indexSettings = Settings.builder() // <1> - .put(SETTING_NUMBER_OF_SHARDS, 1) - .put(SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - - String payload = Strings.toString(XContentFactory.jsonBuilder() // <2> - .startObject() - .startObject("settings") // <3> - .value(indexSettings) - .endObject() - .startObject("mappings") // <4> - .startObject("doc") - .startObject("properties") - .startObject("time") - .field("type", "date") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject()); - - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5> - - Response response = client.getLowLevelClient().performRequest("PUT", "my-index", emptyMap(), entity); // <6> - if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { - // <7> - } - //end::migration-create-index - assertEquals(200, response.getStatusLine().getStatusCode()); - } - } - public void testClusterHealth() throws IOException { RestHighLevelClient client = highLevelClient(); { //tag::migration-cluster-health - Map parameters = singletonMap("wait_for_status", "green"); - Response response = client.getLowLevelClient().performRequest("GET", "/_cluster/health", parameters); // <1> + Request request = new Request("GET", "/_cluster/health"); + request.addParameter("wait_for_status", "green"); // <1> + Response response = 
client.getLowLevelClient().performRequest(request); // <2> ClusterHealthStatus healthStatus; - try (InputStream is = response.getEntity().getContent()) { // <2> - Map map = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); // <3> - healthStatus = ClusterHealthStatus.fromString((String) map.get("status")); // <4> + try (InputStream is = response.getEntity().getContent()) { // <3> + Map map = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); // <4> + healthStatus = ClusterHealthStatus.fromString((String) map.get("status")); // <5> } - if (healthStatus == ClusterHealthStatus.GREEN) { - // <5> + if (healthStatus != ClusterHealthStatus.GREEN) { + // <6> } //end::migration-cluster-health assertSame(ClusterHealthStatus.GREEN, healthStatus); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 5979c508de287..667e38a5167d7 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -351,11 +351,12 @@ private Response bodyTest(final String method) throws IOException { private Response bodyTest(final RestClient restClient, final String method) throws IOException { String requestBody = "{ \"field\": \"value\" }"; - StringEntity entity = new StringEntity(requestBody, ContentType.APPLICATION_JSON); int statusCode = randomStatusCode(getRandom()); + Request request = new Request(method, "/" + statusCode); + request.setJsonEntity(requestBody); Response esResponse; try { - esResponse = restClient.performRequest(method, "/" + statusCode, Collections.emptyMap(), entity); + esResponse = restClient.performRequest(request); } catch(ResponseException e) { esResponse = e.getResponse(); } diff --git 
a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 2d419b213d686..714d2e57e6d20 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -58,11 +58,9 @@ import java.net.URI; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.TreeMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc index ad06c26e2896f..593f9119a1142 100644 --- a/docs/CHANGELOG.asciidoc +++ b/docs/CHANGELOG.asciidoc @@ -316,6 +316,8 @@ analysis module. ({pull}30397[#30397]) Highlighting:: * Limit analyzed text for highlighting (improvements) {pull}28808[#28808] (issues: {issue}16764[#16764], {issue}27934[#27934]) {ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow copying source settings on index resize operations] ({pull}30255[#30255]) +{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow +copying source settings on index resize operations] ({pull}30255[#30255], {pull}30404[#30404]) Added new "Request" object flavored request methods in the RestClient. Prefer these instead of the multi-argument versions. 
({pull}29623[#29623]) diff --git a/docs/java-rest/high-level/migration.asciidoc b/docs/java-rest/high-level/migration.asciidoc index 1349ccb35fe3b..ad4e0613fc14a 100644 --- a/docs/java-rest/high-level/migration.asciidoc +++ b/docs/java-rest/high-level/migration.asciidoc @@ -2,7 +2,7 @@ == Migration Guide This section describes how to migrate existing code from the `TransportClient` -to the new Java High Level REST Client released with the version 5.6.0 +to the Java High Level REST Client released with the version 5.6.0 of Elasticsearch. === Motivations around a new Java client @@ -107,9 +107,6 @@ More importantly, the high-level client: request constructors like `new IndexRequest()` to create requests objects. The requests are then executed using synchronous or asynchronous dedicated methods like `client.index()` or `client.indexAsync()`. -- does not provide indices or cluster management APIs. Management -operations can be executed by external scripts or -<>. ==== How to migrate the way requests are built @@ -241,71 +238,6 @@ returned by the cluster. <4> The `onFailure()` method is called when an error occurs during the execution of the request. -[[java-rest-high-level-migration-manage-indices]] -==== Manage Indices using the Low-Level REST Client - -The low-level client is able to execute any kind of HTTP requests, and can -therefore be used to call the APIs that are not yet supported by the high level client. 
- -For example, creating a new index with the `TransportClient` may look like this: - -[source,java] --------------------------------------------------- -Settings settings = Settings.builder() // <1> - .put(SETTING_NUMBER_OF_SHARDS, 1) - .put(SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - -String mappings = XContentFactory.jsonBuilder() // <2> - .startObject() - .startObject("doc") - .startObject("properties") - .startObject("time") - .field("type", "date") - .endObject() - .endObject() - .endObject() - .endObject() - .string(); - -CreateIndexResponse response = transportClient.admin().indices() // <3> - .prepareCreate("my-index") - .setSettings(indexSettings) - .addMapping("doc", docMapping, XContentType.JSON) - .get(); - -if (response.isAcknowledged() == false) { - // <4> -} --------------------------------------------------- -<1> Define the settings of the index -<2> Define the mapping for document of type `doc` using a -`XContentBuilder` -<3> Create the index with the previous settings and mapping -using the `prepareCreate()` method. The execution is synchronous -and blocks on the `get()` method until the remote cluster returns -a response. -<4> Handle the situation where the index has not been created - -The same operation executed with the low-level client could be: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/MigrationDocumentationIT.java[migration-create-index] --------------------------------------------------- -<1> Define the settings of the index -<2> Define the body of the HTTP request using a `XContentBuilder` with JSON format -<3> Include the settings in the request body -<4> Include the mappings in the request body -<5> Convert the request body from `String` to a `HttpEntity` and -set its content type (here, JSON) -<6> Execute the request using the low-level client. 
The execution is synchronous -and blocks on the `performRequest()` method until the remote cluster returns -a response. The low-level client can be retrieved from an existing `RestHighLevelClient` -instance through the `getLowLevelClient` getter method. -<7> Handle the situation where the index has not been created - - [[java-rest-high-level-migration-cluster-health]] ==== Checking Cluster Health using the Low-Level REST Client @@ -331,18 +263,18 @@ With the low-level client, the code can be changed to: -------------------------------------------------- include-tagged::{doc-tests}/MigrationDocumentationIT.java[migration-cluster-health] -------------------------------------------------- -<1> Call the cluster's health REST endpoint and wait for the cluster health to become green, -then get back a `Response` object. -<2> Retrieve an `InputStream` object in order to read the response's content -<3> Parse the response's content using Elasticsearch's helper class `XContentHelper`. This +<1> Set up the request to wait for the cluster's health to become green if it isn't already. +<2> Make the request and get back a `Response` object. +<3> Retrieve an `InputStream` object in order to read the response's content +<4> Parse the response's content using Elasticsearch's helper class `XContentHelper`. This helper requires the content type of the response to be passed as an argument and returns a `Map` of objects. Values in the map can be of any type, including inner `Map` that are used to represent the JSON object hierarchy. -<4> Retrieve the value of the `status` field in the response map, casts it as a a `String` +<5> Retrieve the value of the `status` field in the response map, casts it as a `String` object and use the `ClusterHealthStatus.fromString()` method to convert it as a
-<5> Handle the situation where the cluster's health is not green +<6> Handle the situation where the cluster's health is not green Note that for convenience this example uses Elasticsearch's helpers to parse the JSON response body, but any other JSON parser could have been use instead. diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index f96cbfa1f2ceb..72262ce13f9d9 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -103,9 +103,14 @@ specific index module: `index.auto_expand_replicas`:: - Auto-expand the number of replicas based on the number of available nodes. + Auto-expand the number of replicas based on the number of data nodes in the cluster. Set to a dash delimited lower and upper bound (e.g. `0-5`) or use `all` - for the upper bound (e.g. `0-all`). Defaults to `false` (i.e. disabled). + for the upper bound (e.g. `0-all`). Defaults to `false` (i.e. disabled). + Note that the auto-expanded number of replicas does not take any other allocation + rules into account, such as <>, + <> or <>, + and this can lead to the cluster health becoming `YELLOW` if the applicable rules + prevent all the replicas from being allocated. `index.refresh_interval`:: diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 81d79c47472df..496ae7253ce9c 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -62,11 +62,20 @@ the following request: [source,js] -------------------------------------------------- -POST my_source_index/_shrink/my_target_index +POST my_source_index/_shrink/my_target_index?copy_settings=true +{ + "settings": { + "index.routing.allocation.require._name": null, <1> + "index.blocks.write": null <2> + } +} -------------------------------------------------- // CONSOLE // TEST[continued] +<1> Clear the allocation requirement copied from the source index. 
+<2> Clear the index write block copied from the source index. + The above request returns immediately once the target index has been added to the cluster state -- it doesn't wait for the shrink operation to start. @@ -97,7 +106,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,js] -------------------------------------------------- -POST my_source_index/_shrink/my_target_index +POST my_source_index/_shrink/my_target_index?copy_settings=true { "settings": { "index.number_of_replicas": 1, @@ -125,9 +134,11 @@ NOTE: By default, with the exception of `index.analysis`, `index.similarity`, and `index.sort` settings, index settings on the source index are not copied during a shrink operation. With the exception of non-copyable settings, settings from the source index can be copied to the target index by adding the URL -parameter `copy_settings=true` to the request. +parameter `copy_settings=true` to the request. Note that `copy_settings` can not +be set to `false`. 
The parameter `copy_settings` will be removed in 8.0.0 -deprecated[6.4.0, `copy_settings` will default to `true` in 8.x and will be removed in 9.0.0] +deprecated[6.4.0, not copying settings is deprecated, copying settings will be +the default behavior in 7.x] [float] === Monitoring the shrink process diff --git a/docs/reference/indices/split-index.asciidoc b/docs/reference/indices/split-index.asciidoc index 1f5c0df9484ce..2734ff4cb5719 100644 --- a/docs/reference/indices/split-index.asciidoc +++ b/docs/reference/indices/split-index.asciidoc @@ -117,7 +117,7 @@ the following request: [source,js] -------------------------------------------------- -POST my_source_index/_split/my_target_index +POST my_source_index/_split/my_target_index?copy_settings=true { "settings": { "index.number_of_shards": 2 @@ -152,7 +152,7 @@ and accepts `settings` and `aliases` parameters for the target index: [source,js] -------------------------------------------------- -POST my_source_index/_split/my_target_index +POST my_source_index/_split/my_target_index?copy_settings=true { "settings": { "index.number_of_shards": 5 <1> @@ -175,9 +175,11 @@ NOTE: By default, with the exception of `index.analysis`, `index.similarity`, and `index.sort` settings, index settings on the source index are not copied during a split operation. With the exception of non-copyable settings, settings from the source index can be copied to the target index by adding the URL -parameter `copy_settings=true` to the request. +parameter `copy_settings=true` to the request. Note that `copy_settings` can not +be set to `false`. 
The parameter `copy_settings` will be removed in 8.0.0 -deprecated[6.4.0, `copy_settings` will default to `true` in 8.x and will be removed in 9.0.0] +deprecated[6.4.0, not copying settings is deprecated, copying settings will be +the default behavior in 7.x] [float] === Monitoring the split process diff --git a/gradle.properties b/gradle.properties index 2511c740bb5b1..08b03629ad53a 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ org.gradle.daemon=false -org.gradle.jvmargs=-Xmx1536m +org.gradle.jvmargs=-Xmx2g diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharMatcher.java similarity index 99% rename from server/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharMatcher.java index b9e70d05bb77b..3d8bb8d275394 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CharMatcher.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharMatcher.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import java.util.HashSet; import java.util.Set; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java similarity index 87% rename from server/src/main/java/org/elasticsearch/index/analysis/ClassicTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java index 11f36dfa17702..e81f6b88d248c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ClassicTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.ClassicTokenizer; @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; /** * Factory for {@link ClassicTokenizer} @@ -33,7 +34,7 @@ public class ClassicTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; - public ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index a01eb52fdd498..c9b48f0c8650d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -34,9 +34,11 @@ import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.core.KeywordTokenizer; +import org.apache.lucene.analysis.core.LetterTokenizer; import org.apache.lucene.analysis.core.LowerCaseTokenizer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.UpperCaseFilter; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.cz.CzechStemFilter; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; @@ -58,17 +60,25 @@ import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter; import org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; +import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenFilter; +import org.apache.lucene.analysis.ngram.NGramTokenizer; +import org.apache.lucene.analysis.path.PathHierarchyTokenizer; +import org.apache.lucene.analysis.pattern.PatternTokenizer; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; +import org.apache.lucene.analysis.standard.ClassicTokenizer; +import 
org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; +import org.apache.lucene.analysis.th.ThaiTokenizer; import org.apache.lucene.analysis.tr.ApostropheFilter; import org.apache.lucene.analysis.util.ElisionFilter; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; @@ -169,6 +179,19 @@ public Map> getTokenizers() { Map> tokenizers = new TreeMap<>(); tokenizers.put("simple_pattern", SimplePatternTokenizerFactory::new); tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); + tokenizers.put("thai", ThaiTokenizerFactory::new); + tokenizers.put("nGram", NGramTokenizerFactory::new); + tokenizers.put("ngram", NGramTokenizerFactory::new); + tokenizers.put("edgeNGram", EdgeNGramTokenizerFactory::new); + tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new); + tokenizers.put("classic", ClassicTokenizerFactory::new); + tokenizers.put("letter", LetterTokenizerFactory::new); + tokenizers.put("lowercase", LowerCaseTokenizerFactory::new); + tokenizers.put("path_hierarchy", PathHierarchyTokenizerFactory::new); + tokenizers.put("PathHierarchy", PathHierarchyTokenizerFactory::new); + tokenizers.put("pattern", PatternTokenizerFactory::new); + tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new); + tokenizers.put("whitespace", WhitespaceTokenizerFactory::new); return tokenizers; } @@ -283,6 +306,16 @@ public List getPreConfiguredTokenFilters() { public List getPreConfiguredTokenizers() { List tokenizers = new ArrayList<>(); tokenizers.add(PreConfiguredTokenizer.singleton("keyword", KeywordTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("classic", ClassicTokenizer::new, null)); + 
tokenizers.add(PreConfiguredTokenizer.singleton("uax_url_email", UAX29URLEmailTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("path_hierarchy", PathHierarchyTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("edge_ngram", + () -> new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE), null)); + tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1), null)); + tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new, null)); tokenizers.add(PreConfiguredTokenizer.singleton("lowercase", LowerCaseTokenizer::new, () -> new TokenFilterFactory() { @Override public String name() { @@ -294,6 +327,13 @@ public TokenStream create(TokenStream tokenStream) { return new LowerCaseFilter(tokenStream); } })); + + // Temporary shim for aliases. 
TODO deprecate after they are moved + tokenizers.add(PreConfiguredTokenizer.singleton("nGram", NGramTokenizer::new, null)); + tokenizers.add(PreConfiguredTokenizer.singleton("edgeNGram", + () -> new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE), null)); + tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new, null)); + return tokenizers; } } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java similarity index 86% rename from server/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java index 8210883b2f8f5..55a527cc792c8 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; @@ -25,19 +25,17 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; -import static org.elasticsearch.index.analysis.NGramTokenizerFactory.parseTokenChars; +import static org.elasticsearch.analysis.common.NGramTokenizerFactory.parseTokenChars; public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final int minGram; - private final int maxGram; - private final CharMatcher matcher; - - public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LetterTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java similarity index 84% rename from server/src/main/java/org/elasticsearch/index/analysis/LetterTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java index 364c236762391..be98eb73a9cad 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LetterTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java @@ -17,17 +17,18 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LetterTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class LetterTokenizerFactory extends AbstractTokenizerFactory { - public LetterTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + LetterTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LowerCaseTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java similarity index 82% rename from server/src/main/java/org/elasticsearch/index/analysis/LowerCaseTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java index 16939f0d153a5..8f0c5f759aa64 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LowerCaseTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java @@ -17,17 +17,19 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.LowerCaseTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; +import org.elasticsearch.index.analysis.MultiTermAwareComponent; public class LowerCaseTokenizerFactory extends AbstractTokenizerFactory implements MultiTermAwareComponent { - public LowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + LowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java similarity index 95% rename from server/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java index fa69b66ad8bf9..fcf7536d06cc3 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java @@ -17,13 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.NGramTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; import java.lang.reflect.Field; import java.lang.reflect.Modifier; @@ -82,7 +83,7 @@ static CharMatcher parseTokenChars(List characterClasses) { return builder.build(); } - public NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff(); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java similarity index 92% rename from server/src/main/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java index b69a37b65c25f..d9bfa786591e1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.path.PathHierarchyTokenizer; @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { @@ -35,7 +36,7 @@ public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { private final int skip; private final boolean reverse; - public PathHierarchyTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + PathHierarchyTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); bufferSize = settings.getAsInt("buffer_size", 1024); String delimiter = settings.get("delimiter"); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java similarity index 88% rename from server/src/main/java/org/elasticsearch/index/analysis/PatternTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java index d11d88c085ea4..f850b68ac9829 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.pattern.PatternTokenizer; @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; import java.util.regex.Pattern; @@ -33,7 +34,7 @@ public class PatternTokenizerFactory extends AbstractTokenizerFactory { private final Pattern pattern; private final int group; - public PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ThaiTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ThaiTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java index 7f702192f1af3..b76aca42d36ee 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ThaiTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java @@ -17,20 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.th.ThaiTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; /** * Factory for {@link ThaiTokenizer} */ public class ThaiTokenizerFactory extends AbstractTokenizerFactory { - public ThaiTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + ThaiTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java similarity index 87% rename from server/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 79eb0c604d995..8040c88ea7fa5 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; @@ -25,12 +25,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class UAX29URLEmailTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; - public UAX29URLEmailTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + UAX29URLEmailTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } @@ -41,4 +42,4 @@ public Tokenizer create() { tokenizer.setMaxTokenLength(maxTokenLength); return tokenizer; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java similarity index 87% rename from server/src/main/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactory.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java index c71747a596d6b..1f89d4688136f 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; @@ -26,13 +26,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class WhitespaceTokenizerFactory extends AbstractTokenizerFactory { static final String MAX_TOKEN_LENGTH = "max_token_length"; private Integer maxTokenLength; - public WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/server/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharMatcherTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharMatcherTests.java index 31f80a66cdacc..1427e5d84513f 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/CharMatcherTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CharMatcherTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.elasticsearch.test.ESTestCase; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java index befd26296a5c8..7deadcbcc25f6 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory; import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory; import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory; +import org.elasticsearch.index.analysis.KeywordTokenizerFactory; import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory; import org.elasticsearch.index.analysis.SynonymTokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase; @@ -45,6 +46,16 @@ protected Map> getTokenizers() { Map> tokenizers = new TreeMap<>(super.getTokenizers()); tokenizers.put("simplepattern", SimplePatternTokenizerFactory.class); tokenizers.put("simplepatternsplit", SimplePatternSplitTokenizerFactory.class); + tokenizers.put("thai", ThaiTokenizerFactory.class); + tokenizers.put("ngram", NGramTokenizerFactory.class); + tokenizers.put("edgengram", EdgeNGramTokenizerFactory.class); + tokenizers.put("classic", ClassicTokenizerFactory.class); + tokenizers.put("letter", LetterTokenizerFactory.class); + tokenizers.put("lowercase", LowerCaseTokenizerFactory.class); + tokenizers.put("pathhierarchy", PathHierarchyTokenizerFactory.class); + tokenizers.put("pattern", PatternTokenizerFactory.class); + tokenizers.put("uax29urlemail", UAX29URLEmailTokenizerFactory.class); + tokenizers.put("whitespace", 
WhitespaceTokenizerFactory.class); return tokenizers; } @@ -211,10 +222,25 @@ protected Map> getPreConfiguredTokenFilters() { @Override protected Map> getPreConfiguredTokenizers() { - Map> filters = new TreeMap<>(super.getPreConfiguredTokenizers()); - filters.put("keyword", null); - filters.put("lowercase", null); - return filters; + Map> tokenizers = new TreeMap<>(super.getPreConfiguredTokenizers()); + tokenizers.put("keyword", null); + tokenizers.put("lowercase", null); + tokenizers.put("classic", null); + tokenizers.put("uax_url_email", org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class); + tokenizers.put("path_hierarchy", null); + tokenizers.put("letter", null); + tokenizers.put("whitespace", null); + tokenizers.put("ngram", null); + tokenizers.put("edge_ngram", null); + tokenizers.put("pattern", null); + tokenizers.put("thai", null); + + // TODO drop aliases once they are moved to module + tokenizers.put("nGram", tokenizers.get("ngram")); + tokenizers.put("edgeNGram", tokenizers.get("edge_ngram")); + tokenizers.put("PathHierarchy", tokenizers.get("path_hierarchy")); + + return tokenizers; } /** diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java index 8efc0d5941f9e..2453ecd1e7f12 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java @@ -45,7 +45,7 @@ public void testDefault() throws IOException { .build(); try { - AnalysisTestsHelper.createTestAnalysisFromSettings(settings); + AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); Assert.fail("[common_words] or [common_words_path] is set"); } catch (IllegalArgumentException e) { } catch 
(IOException e) { diff --git a/server/src/test/java/org/elasticsearch/index/query/DisableGraphQueryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/DisableGraphQueryTests.java similarity index 85% rename from server/src/test/java/org/elasticsearch/index/query/DisableGraphQueryTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/DisableGraphQueryTests.java index 30ecb9034354e..d1792e94f7331 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DisableGraphQueryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/DisableGraphQueryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.analysis.common; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; @@ -29,12 +29,22 @@ import org.apache.lucene.search.MultiPhraseQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.MatchPhraseQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryStringQueryBuilder; +import org.elasticsearch.index.query.SimpleQueryStringBuilder; +import org.elasticsearch.index.query.SimpleQueryStringFlag; import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import static org.hamcrest.Matchers.equalTo; @@ -49,6 +59,11 @@ public class DisableGraphQueryTests extends ESSingleNodeTestCase { private static Query expectedQueryWithUnigram; private static Query expectedPhraseQueryWithUnigram; + @Override + protected 
Collection> getPlugins() { + return Collections.singleton(CommonAnalysisPlugin.class); + } + @Before public void setup() { Settings settings = Settings.builder() @@ -150,42 +165,42 @@ public void cleanup() { public void testMatchPhraseQuery() throws IOException { MatchPhraseQueryBuilder builder = new MatchPhraseQueryBuilder("text_shingle_unigram", "foo bar baz"); - Query query = builder.doToQuery(shardContext); + Query query = builder.toQuery(shardContext); assertThat(expectedPhraseQueryWithUnigram, equalTo(query)); builder = new MatchPhraseQueryBuilder("text_shingle", "foo bar baz biz"); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQuery, equalTo(query)); } public void testMatchQuery() throws IOException { MatchQueryBuilder builder = new MatchQueryBuilder("text_shingle_unigram", "foo bar baz"); - Query query = builder.doToQuery(shardContext); + Query query = builder.toQuery(shardContext); assertThat(expectedQueryWithUnigram, equalTo(query)); builder = new MatchQueryBuilder("text_shingle", "foo bar baz biz"); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedQuery, equalTo(query)); } public void testMultiMatchQuery() throws IOException { MultiMatchQueryBuilder builder = new MultiMatchQueryBuilder("foo bar baz", "text_shingle_unigram"); - Query query = builder.doToQuery(shardContext); + Query query = builder.toQuery(shardContext); assertThat(expectedQueryWithUnigram, equalTo(query)); builder.type(MatchQuery.Type.PHRASE); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQueryWithUnigram, equalTo(query)); builder = new MultiMatchQueryBuilder("foo bar baz biz", "text_shingle"); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedQuery, equalTo(query)); builder.type(MatchQuery.Type.PHRASE); - query = builder.doToQuery(shardContext); + query = 
builder.toQuery(shardContext); assertThat(expectedPhraseQuery, equalTo(query)); } @@ -193,47 +208,47 @@ public void testSimpleQueryString() throws IOException { SimpleQueryStringBuilder builder = new SimpleQueryStringBuilder("foo bar baz"); builder.field("text_shingle_unigram"); builder.flags(SimpleQueryStringFlag.NONE); - Query query = builder.doToQuery(shardContext); + Query query = builder.toQuery(shardContext); assertThat(expectedQueryWithUnigram, equalTo(query)); builder = new SimpleQueryStringBuilder("\"foo bar baz\""); builder.field("text_shingle_unigram"); builder.flags(SimpleQueryStringFlag.PHRASE); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQueryWithUnigram, equalTo(query)); builder = new SimpleQueryStringBuilder("foo bar baz biz"); builder.field("text_shingle"); builder.flags(SimpleQueryStringFlag.NONE); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedQuery, equalTo(query)); builder = new SimpleQueryStringBuilder("\"foo bar baz biz\""); builder.field("text_shingle"); builder.flags(SimpleQueryStringFlag.PHRASE); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQuery, equalTo(query)); } public void testQueryString() throws IOException { QueryStringQueryBuilder builder = new QueryStringQueryBuilder("foo bar baz"); builder.field("text_shingle_unigram"); - Query query = builder.doToQuery(shardContext); + Query query = builder.toQuery(shardContext); assertThat(expectedQueryWithUnigram, equalTo(query)); builder = new QueryStringQueryBuilder("\"foo bar baz\""); builder.field("text_shingle_unigram"); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQueryWithUnigram, equalTo(query)); builder = new QueryStringQueryBuilder("foo bar baz biz"); builder.field("text_shingle"); - query = builder.doToQuery(shardContext); + query = 
builder.toQuery(shardContext); assertThat(expectedQuery, equalTo(query)); builder = new QueryStringQueryBuilder("\"foo bar baz biz\""); builder.field("text_shingle"); - query = builder.doToQuery(shardContext); + query = builder.toQuery(shardContext); assertThat(expectedPhraseQuery, equalTo(query)); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java index 3f4641c7c189b..65c0940784bf1 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java @@ -30,8 +30,6 @@ import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; -import org.elasticsearch.index.analysis.NGramTokenizerFactory; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactoryTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java index 39b96a2cae454..0b545d3355201 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PathHierarchyTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactoryTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import com.carrotsearch.randomizedtesting.generators.RandomPicks; diff --git a/server/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 0bc229c9328cf..3c602c1713b2e 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -17,9 +17,8 @@ * under the License. */ -package org.elasticsearch.index.analysis.synonyms; +package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -46,7 +45,6 @@ import static org.hamcrest.Matchers.startsWith; public class SynonymsAnalysisTests extends ESTestCase { - protected final Logger logger = Loggers.getLogger(getClass()); private IndexAnalyzers indexAnalyzers; public void testSynonymsAnalysis() throws IOException { @@ -58,14 +56,14 @@ public void testSynonymsAnalysis() throws IOException { Files.copy(synonyms, config.resolve("synonyms.txt")); Files.copy(synonymsWordnet, config.resolve("synonyms_wordnet.txt")); - String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json"; + String json = "/org/elasticsearch/analysis/common/synonyms.json"; Settings settings = Settings.builder(). 
loadFromStream(json, getClass().getResourceAsStream(json), false) .put(Environment.PATH_HOME_SETTING.getKey(), home) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); - indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers; + indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; match("synonymAnalyzer", "kimchy is the dude abides", "shay is the elasticsearch man!"); match("synonymAnalyzer_file", "kimchy is the dude abides", "shay is the elasticsearch man!"); @@ -93,7 +91,7 @@ public void testSynonymWordDeleteByAnalyzer() throws IOException { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { - indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers; + indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; fail("fail! due to synonym word deleted by analyzer"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); @@ -114,7 +112,7 @@ public void testExpandSynonymWordDeleteByAnalyzer() throws IOException { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); try { - indexAnalyzers = createTestAnalysis(idxSettings, settings).indexAnalyzers; + indexAnalyzers = createTestAnalysis(idxSettings, settings, new CommonAnalysisPlugin()).indexAnalyzers; fail("fail! 
due to synonym word deleted by analyzer"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactoryTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactoryTests.java rename to modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactoryTests.java index 6dbb5e174b145..f34b694fbf60f 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/WhitespaceTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactoryTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import com.carrotsearch.randomizedtesting.generators.RandomStrings; diff --git a/server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms.json b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms.json similarity index 100% rename from server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms.json rename to modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms.json diff --git a/server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms.txt b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms.txt similarity index 100% rename from server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms.txt rename to modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms.txt diff --git a/server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms_wordnet.txt 
b/modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms_wordnet.txt similarity index 100% rename from server/src/test/resources/org/elasticsearch/index/analysis/synonyms/synonyms_wordnet.txt rename to modules/analysis-common/src/test/resources/org/elasticsearch/analysis/common/synonyms_wordnet.txt diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml index 71fb9b32d7d12..2e839fe012dc4 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/30_tokenizers.yml @@ -72,3 +72,374 @@ - match: { detail.tokenizer.name: _anonymous_tokenizer } - match: { detail.tokenizer.tokens.0.token: foo } - match: { detail.tokenizer.tokens.1.token: bar } + +--- +"thai_tokenizer": + - do: + indices.analyze: + body: + text: "ภาษาไทย" + explain: true + tokenizer: + type: thai + - length: { detail.tokenizer.tokens: 2 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: ภาษา } + - match: { detail.tokenizer.tokens.1.token: ไทย } + + - do: + indices.analyze: + body: + text: "ภาษาไทย" + explain: true + tokenizer: thai + - length: { detail.tokenizer.tokens: 2 } + - match: { detail.tokenizer.name: thai } + - match: { detail.tokenizer.tokens.0.token: ภาษา } + - match: { detail.tokenizer.tokens.1.token: ไทย } + +--- +"ngram": + - do: + indices.analyze: + body: + text: "foobar" + explain: true + tokenizer: + type: ngram + min_gram: 3 + max_gram: 3 + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: foo } + - match: { detail.tokenizer.tokens.1.token: oob } + - match: { detail.tokenizer.tokens.2.token: oba } + - match: { 
detail.tokenizer.tokens.3.token: bar } + + - do: + indices.analyze: + body: + text: "foobar" + explain: true + tokenizer: + type: nGram + min_gram: 3 + max_gram: 3 + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: foo } + - match: { detail.tokenizer.tokens.1.token: oob } + - match: { detail.tokenizer.tokens.2.token: oba } + - match: { detail.tokenizer.tokens.3.token: bar } + + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: ngram + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: ngram } + - match: { detail.tokenizer.tokens.0.token: f } + - match: { detail.tokenizer.tokens.1.token: fo } + - match: { detail.tokenizer.tokens.2.token: o } + - match: { detail.tokenizer.tokens.3.token: oo } + - match: { detail.tokenizer.tokens.4.token: o } + + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: nGram + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: nGram } + - match: { detail.tokenizer.tokens.0.token: f } + - match: { detail.tokenizer.tokens.1.token: fo } + - match: { detail.tokenizer.tokens.2.token: o } + - match: { detail.tokenizer.tokens.3.token: oo } + - match: { detail.tokenizer.tokens.4.token: o } + +--- +"edge_ngram": + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: + type: edge_ngram + min_gram: 1 + max_gram: 3 + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: f } + - match: { detail.tokenizer.tokens.1.token: fo } + - match: { detail.tokenizer.tokens.2.token: foo } + + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: + type: edgeNGram + min_gram: 1 + max_gram: 3 + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: 
f } + - match: { detail.tokenizer.tokens.1.token: fo } + - match: { detail.tokenizer.tokens.2.token: foo } + + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: edge_ngram + - length: { detail.tokenizer.tokens: 2 } + - match: { detail.tokenizer.name: edge_ngram } + - match: { detail.tokenizer.tokens.0.token: f } + - match: { detail.tokenizer.tokens.1.token: fo } + + - do: + indices.analyze: + body: + text: "foo" + explain: true + tokenizer: edgeNGram + - length: { detail.tokenizer.tokens: 2 } + - match: { detail.tokenizer.name: edgeNGram } + - match: { detail.tokenizer.tokens.0.token: f } + - match: { detail.tokenizer.tokens.1.token: fo } + +--- +"classic": + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." + explain: true + tokenizer: + type: classic + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: Brown } + - match: { detail.tokenizer.tokens.1.token: Foxes } + - match: { detail.tokenizer.tokens.2.token: don't } + - match: { detail.tokenizer.tokens.3.token: jump } + + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." + explain: true + tokenizer: classic + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: classic } + - match: { detail.tokenizer.tokens.0.token: Brown } + - match: { detail.tokenizer.tokens.1.token: Foxes } + - match: { detail.tokenizer.tokens.2.token: don't } + - match: { detail.tokenizer.tokens.3.token: jump } + +--- +"letter": + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." 
+ explain: true + tokenizer: + type: letter + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: Brown } + - match: { detail.tokenizer.tokens.1.token: Foxes } + - match: { detail.tokenizer.tokens.2.token: don } + - match: { detail.tokenizer.tokens.3.token: t } + - match: { detail.tokenizer.tokens.4.token: jump } + + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." + explain: true + tokenizer: letter + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: letter } + - match: { detail.tokenizer.tokens.0.token: Brown } + - match: { detail.tokenizer.tokens.1.token: Foxes } + - match: { detail.tokenizer.tokens.2.token: don } + - match: { detail.tokenizer.tokens.3.token: t } + - match: { detail.tokenizer.tokens.4.token: jump } + +--- +"lowercase": + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." + explain: true + tokenizer: + type: lowercase + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: brown } + - match: { detail.tokenizer.tokens.1.token: foxes } + - match: { detail.tokenizer.tokens.2.token: don } + - match: { detail.tokenizer.tokens.3.token: t } + - match: { detail.tokenizer.tokens.4.token: jump } + + - do: + indices.analyze: + body: + text: "Brown-Foxes don't jump." 
+ explain: true + tokenizer: lowercase + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: lowercase } + - match: { detail.tokenizer.tokens.0.token: brown } + - match: { detail.tokenizer.tokens.1.token: foxes } + - match: { detail.tokenizer.tokens.2.token: don } + - match: { detail.tokenizer.tokens.3.token: t } + - match: { detail.tokenizer.tokens.4.token: jump } + +--- +"path_hierarchy": + - do: + indices.analyze: + body: + text: "a/b/c" + explain: true + tokenizer: + type: path_hierarchy + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: a } + - match: { detail.tokenizer.tokens.1.token: a/b } + - match: { detail.tokenizer.tokens.2.token: a/b/c } + + - do: + indices.analyze: + body: + text: "a/b/c" + explain: true + tokenizer: + type: PathHierarchy + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: a } + - match: { detail.tokenizer.tokens.1.token: a/b } + - match: { detail.tokenizer.tokens.2.token: a/b/c } + + - do: + indices.analyze: + body: + text: "a/b/c" + explain: true + tokenizer: path_hierarchy + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: path_hierarchy } + - match: { detail.tokenizer.tokens.0.token: a } + - match: { detail.tokenizer.tokens.1.token: a/b } + - match: { detail.tokenizer.tokens.2.token: a/b/c } + + - do: + indices.analyze: + body: + text: "a/b/c" + explain: true + tokenizer: PathHierarchy + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: PathHierarchy } + - match: { detail.tokenizer.tokens.0.token: a } + - match: { detail.tokenizer.tokens.1.token: a/b } + - match: { detail.tokenizer.tokens.2.token: a/b/c } + +--- +"pattern": + - do: + indices.analyze: + body: + text: "split by whitespace by default" + explain: true + tokenizer: + type: pattern + - length: { 
detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: split } + - match: { detail.tokenizer.tokens.1.token: by } + - match: { detail.tokenizer.tokens.2.token: whitespace } + - match: { detail.tokenizer.tokens.3.token: by } + - match: { detail.tokenizer.tokens.4.token: default } + + - do: + indices.analyze: + body: + text: "split by whitespace by default" + explain: true + tokenizer: pattern + - length: { detail.tokenizer.tokens: 5 } + - match: { detail.tokenizer.name: pattern } + - match: { detail.tokenizer.tokens.0.token: split } + - match: { detail.tokenizer.tokens.1.token: by } + - match: { detail.tokenizer.tokens.2.token: whitespace } + - match: { detail.tokenizer.tokens.3.token: by } + - match: { detail.tokenizer.tokens.4.token: default } + +--- +"uax_url_email": + - do: + indices.analyze: + body: + text: "Email me at john.smith@global-international.com" + explain: true + tokenizer: + type: uax_url_email + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: Email } + - match: { detail.tokenizer.tokens.1.token: me } + - match: { detail.tokenizer.tokens.2.token: at } + - match: { detail.tokenizer.tokens.3.token: john.smith@global-international.com } + + - do: + indices.analyze: + body: + text: "Email me at john.smith@global-international.com" + explain: true + tokenizer: uax_url_email + - length: { detail.tokenizer.tokens: 4 } + - match: { detail.tokenizer.name: uax_url_email } + - match: { detail.tokenizer.tokens.0.token: Email } + - match: { detail.tokenizer.tokens.1.token: me } + - match: { detail.tokenizer.tokens.2.token: at } + - match: { detail.tokenizer.tokens.3.token: john.smith@global-international.com } + +--- +"whitespace": + - do: + indices.analyze: + body: + text: "split by whitespace" + explain: true + tokenizer: + type: whitespace + - length: { detail.tokenizer.tokens: 3 } + - 
match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: split } + - match: { detail.tokenizer.tokens.1.token: by } + - match: { detail.tokenizer.tokens.2.token: whitespace } + + - do: + indices.analyze: + body: + text: "split by whitespace" + explain: true + tokenizer: whitespace + - length: { detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: whitespace } + - match: { detail.tokenizer.tokens.0.token: split } + - match: { detail.tokenizer.tokens.1.token: by } + - match: { detail.tokenizer.tokens.2.token: whitespace } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml index f8fc3acc02c4c..d7ad1bf6f2a01 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml @@ -70,3 +70,36 @@ text: "foo" - length: { tokens: 1 } - match: { tokens.0.token: "\nfoo\n" } + +--- +"Synonym filter with tokenizer": + - skip: + version: " - 5.99.99" + reason: to support synonym same analysis chain were added in 6.0.0 + - do: + indices.create: + index: test_synonym + body: + settings: + index: + analysis: + tokenizer: + trigram: + type: nGram + min_gram: 3 + max_gram: 3 + filter: + synonym: + type: synonym + synonyms: ["kimchy => shay"] + + - do: + indices.analyze: + index: test_synonym + body: + tokenizer: trigram + filter: [synonym] + text: kimchy + - length: { tokens: 2 } + - match: { tokens.0.token: sha } + - match: { tokens.1.token: hay } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/20_ngram_search.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/20_ngram_search.yml index eb8c9789a63ce..ec7b9493ac07e 100644 --- 
a/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/20_ngram_search.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/20_ngram_search.yml @@ -39,3 +39,97 @@ text: query: foa - match: {hits.total: 1} + +--- +"testNGramCopyField": + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + max_ngram_diff: 9 + analysis: + analyzer: + my_ngram_analyzer: + tokenizer: my_ngram_tokenizer + tokenizer: + my_ngram_tokenizer: + type: ngram + min: 1, + max: 10 + token_chars: [] + mappings: + doc: + properties: + origin: + type: text + copy_to: meta + meta: + type: text + analyzer: my_ngram_analyzer + + - do: + index: + index: test + type: doc + id: 1 + body: { "origin": "C.A1234.5678" } + refresh: true + + - do: + search: + body: + query: + match: + meta: + query: 1234 + - match: {hits.total: 1} + + - do: + search: + body: + query: + match: + meta: + query: 1234.56 + - match: {hits.total: 1} + + - do: + search: + body: + query: + match: + meta: + query: A1234 + - match: {hits.total: 1} + + - do: + search: + body: + query: + term: + meta: + value: a1234 + - match: {hits.total: 0} + + - do: + search: + body: + query: + match: + meta: + query: A1234 + analyzer: my_ngram_analyzer + - match: {hits.total: 1} + + - do: + search: + body: + query: + match: + meta: + query: a1234 + analyzer: my_ngram_analyzer + - match: {hits.total: 1} diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java index 8e0828fcfcaea..22875139c9beb 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalSpec.java @@ -57,7 +57,7 @@ public class RankEvalSpec implements Writeable, ToXContentObject { /** Default max number of requests. 
*/ private static final int MAX_CONCURRENT_SEARCHES = 10; /** optional: Templates to base test requests on */ - private Map templates = new HashMap<>(); + private final Map templates = new HashMap<>(); public RankEvalSpec(List ratedRequests, EvaluationMetric metric, Collection templates) { this.metric = Objects.requireNonNull(metric, "Cannot evaluate ranking if no evaluation metric is provided."); @@ -68,8 +68,8 @@ public RankEvalSpec(List ratedRequests, EvaluationMetric metric, C this.ratedRequests = ratedRequests; if (templates == null || templates.isEmpty()) { for (RatedRequest request : ratedRequests) { - if (request.getTestRequest() == null) { - throw new IllegalStateException("Cannot evaluate ranking if neither template nor test request is " + if (request.getEvaluationRequest() == null) { + throw new IllegalStateException("Cannot evaluate ranking if neither template nor evaluation request is " + "provided. Seen for request id: " + request.getId()); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java index 392ce5d0633a0..79dd693b3ac3c 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedRequest.java @@ -75,9 +75,12 @@ public class RatedRequest implements Writeable, ToXContentObject { private final String id; private final List summaryFields; private final List ratedDocs; - // Search request to execute for this rated request. This can be null if template and corresponding parameters are supplied. + /** + * Search request to execute for this rated request. 
This can be null in + * case the query is supplied as a template with corresponding parameters + */ @Nullable - private SearchSourceBuilder testRequest; + private final SearchSourceBuilder evaluationRequest; /** * Map of parameters to use for filling a query template, can be used * instead of providing testRequest. @@ -86,27 +89,49 @@ public class RatedRequest implements Writeable, ToXContentObject { @Nullable private String templateId; - private RatedRequest(String id, List ratedDocs, SearchSourceBuilder testRequest, + /** + * Create a rated request with template ids and parameters. + * + * @param id a unique name for this rated request + * @param ratedDocs a list of document ratings + * @param params template parameters + * @param templateId a templare id + */ + public RatedRequest(String id, List ratedDocs, Map params, + String templateId) { + this(id, ratedDocs, null, params, templateId); + } + + /** + * Create a rated request using a {@link SearchSourceBuilder} to define the + * evaluated query. 
+ * + * @param id a unique name for this rated request + * @param ratedDocs a list of document ratings + * @param evaluatedQuery the query that is evaluated + */ + public RatedRequest(String id, List ratedDocs, SearchSourceBuilder evaluatedQuery) { + this(id, ratedDocs, evaluatedQuery, new HashMap<>(), null); + } + + private RatedRequest(String id, List ratedDocs, SearchSourceBuilder evaluatedQuery, Map params, String templateId) { - if (params != null && (params.size() > 0 && testRequest != null)) { + if (params != null && (params.size() > 0 && evaluatedQuery != null)) { throw new IllegalArgumentException( - "Ambiguous rated request: Set both, verbatim test request and test request " - + "template parameters."); + "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters."); } - if (templateId != null && testRequest != null) { + if (templateId != null && evaluatedQuery != null) { throw new IllegalArgumentException( - "Ambiguous rated request: Set both, verbatim test request and test request " - + "template parameters."); + "Ambiguous rated request: Set both, verbatim test request and test request " + "template parameters."); } - if ((params == null || params.size() < 1) && testRequest == null) { - throw new IllegalArgumentException( - "Need to set at least test request or test request template parameters."); + if ((params == null || params.size() < 1) && evaluatedQuery == null) { + throw new IllegalArgumentException("Need to set at least test request or test request template parameters."); } if ((params != null && params.size() > 0) && templateId == null) { - throw new IllegalArgumentException( - "If template parameters are supplied need to set id of template to apply " - + "them to too."); + throw new IllegalArgumentException("If template parameters are supplied need to set id of template to apply " + "them to too."); } + validateEvaluatedQuery(evaluatedQuery); + // check that not two documents with same _index/id are 
specified Set docKeys = new HashSet<>(); for (RatedDocument doc : ratedDocs) { @@ -118,7 +143,7 @@ private RatedRequest(String id, List ratedDocs, SearchSourceBuild } this.id = id; - this.testRequest = testRequest; + this.evaluationRequest = evaluatedQuery; this.ratedDocs = new ArrayList<>(ratedDocs); if (params != null) { this.params = new HashMap<>(params); @@ -129,18 +154,30 @@ private RatedRequest(String id, List ratedDocs, SearchSourceBuild this.summaryFields = new ArrayList<>(); } - public RatedRequest(String id, List ratedDocs, Map params, - String templateId) { - this(id, ratedDocs, null, params, templateId); - } - - public RatedRequest(String id, List ratedDocs, SearchSourceBuilder testRequest) { - this(id, ratedDocs, testRequest, new HashMap<>(), null); + static void validateEvaluatedQuery(SearchSourceBuilder evaluationRequest) { + // ensure that testRequest, if set, does not contain aggregation, suggest or highlighting section + if (evaluationRequest != null) { + if (evaluationRequest.suggest() != null) { + throw new IllegalArgumentException("Query in rated requests should not contain a suggest section."); + } + if (evaluationRequest.aggregations() != null) { + throw new IllegalArgumentException("Query in rated requests should not contain aggregations."); + } + if (evaluationRequest.highlighter() != null) { + throw new IllegalArgumentException("Query in rated requests should not contain a highlighter section."); + } + if (evaluationRequest.explain() != null && evaluationRequest.explain()) { + throw new IllegalArgumentException("Query in rated requests should not use explain."); + } + if (evaluationRequest.profile()) { + throw new IllegalArgumentException("Query in rated requests should not use profile."); + } + } } - public RatedRequest(StreamInput in) throws IOException { + RatedRequest(StreamInput in) throws IOException { this.id = in.readString(); - testRequest = in.readOptionalWriteable(SearchSourceBuilder::new); + evaluationRequest = 
in.readOptionalWriteable(SearchSourceBuilder::new); int intentSize = in.readInt(); ratedDocs = new ArrayList<>(intentSize); @@ -159,7 +196,7 @@ public RatedRequest(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - out.writeOptionalWriteable(testRequest); + out.writeOptionalWriteable(evaluationRequest); out.writeInt(ratedDocs.size()); for (RatedDocument ratedDoc : ratedDocs) { @@ -173,8 +210,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(this.templateId); } - public SearchSourceBuilder getTestRequest() { - return testRequest; + public SearchSourceBuilder getEvaluationRequest() { + return evaluationRequest; } /** return the user supplied request id */ @@ -240,8 +277,8 @@ public static RatedRequest fromXContent(XContentParser parser) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(ID_FIELD.getPreferredName(), this.id); - if (testRequest != null) { - builder.field(REQUEST_FIELD.getPreferredName(), this.testRequest); + if (evaluationRequest != null) { + builder.field(REQUEST_FIELD.getPreferredName(), this.evaluationRequest); } builder.startArray(RATINGS_FIELD.getPreferredName()); for (RatedDocument doc : this.ratedDocs) { @@ -285,7 +322,7 @@ public final boolean equals(Object obj) { RatedRequest other = (RatedRequest) obj; - return Objects.equals(id, other.id) && Objects.equals(testRequest, other.testRequest) + return Objects.equals(id, other.id) && Objects.equals(evaluationRequest, other.evaluationRequest) && Objects.equals(summaryFields, other.summaryFields) && Objects.equals(ratedDocs, other.ratedDocs) && Objects.equals(params, other.params) @@ -294,7 +331,7 @@ public final boolean equals(Object obj) { @Override public final int hashCode() { - return Objects.hash(id, testRequest, summaryFields, ratedDocs, params, + return Objects.hash(id, evaluationRequest, 
summaryFields, ratedDocs, params, templateId); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index 019ae274466ab..e0a0b3ea13378 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -52,6 +52,7 @@ import java.util.concurrent.ConcurrentHashMap; import static org.elasticsearch.common.xcontent.XContentHelper.createParser; +import static org.elasticsearch.index.rankeval.RatedRequest.validateEvaluatedQuery; /** * Instances of this class execute a collection of search intents (read: user @@ -99,15 +100,17 @@ protected void doExecute(RankEvalRequest request, ActionListener ratedRequestsInSearch = new ArrayList<>(); for (RatedRequest ratedRequest : ratedRequests) { - SearchSourceBuilder ratedSearchSource = ratedRequest.getTestRequest(); - if (ratedSearchSource == null) { + SearchSourceBuilder evaluationRequest = ratedRequest.getEvaluationRequest(); + if (evaluationRequest == null) { Map params = ratedRequest.getParams(); String templateId = ratedRequest.getTemplateId(); TemplateScript.Factory templateScript = scriptsWithoutParams.get(templateId); String resolvedRequest = templateScript.newInstance(params).execute(); try (XContentParser subParser = createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, new BytesArray(resolvedRequest), XContentType.JSON)) { - ratedSearchSource = SearchSourceBuilder.fromXContent(subParser, false); + evaluationRequest = SearchSourceBuilder.fromXContent(subParser, false); + // check for parts that should not be part of a ranking evaluation request + validateEvaluatedQuery(evaluationRequest); } catch (IOException e) { // if we fail parsing, put the exception into the errors map and continue 
errors.put(ratedRequest.getId(), e); @@ -116,17 +119,17 @@ LoggingDeprecationHandler.INSTANCE, new BytesArray(resolvedRequest), XContentTyp } if (metric.forcedSearchSize().isPresent()) { - ratedSearchSource.size(metric.forcedSearchSize().get()); + evaluationRequest.size(metric.forcedSearchSize().get()); } ratedRequestsInSearch.add(ratedRequest); List summaryFields = ratedRequest.getSummaryFields(); if (summaryFields.isEmpty()) { - ratedSearchSource.fetchSource(false); + evaluationRequest.fetchSource(false); } else { - ratedSearchSource.fetchSource(summaryFields.toArray(new String[summaryFields.size()]), new String[0]); + evaluationRequest.fetchSource(summaryFields.toArray(new String[summaryFields.size()]), new String[0]); } - SearchRequest searchRequest = new SearchRequest(request.indices(), ratedSearchSource); + SearchRequest searchRequest = new SearchRequest(request.indices(), evaluationRequest); searchRequest.indicesOptions(request.indicesOptions()); msearchRequest.add(searchRequest); } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index 196b50b7f6163..084f29b8c9a87 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -33,7 +33,11 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.suggest.SuggestBuilder; +import org.elasticsearch.search.suggest.SuggestBuilders; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; 
import org.junit.BeforeClass; @@ -165,7 +169,7 @@ public void testEqualsAndHash() throws IOException { private static RatedRequest mutateTestItem(RatedRequest original) { String id = original.getId(); - SearchSourceBuilder testRequest = original.getTestRequest(); + SearchSourceBuilder evaluationRequest = original.getEvaluationRequest(); List ratedDocs = original.getRatedDocs(); Map params = original.getParams(); List summaryFields = original.getSummaryFields(); @@ -177,11 +181,11 @@ private static RatedRequest mutateTestItem(RatedRequest original) { id = randomValueOtherThan(id, () -> randomAlphaOfLength(10)); break; case 1: - if (testRequest != null) { - int size = randomValueOtherThan(testRequest.size(), () -> randomInt(Integer.MAX_VALUE)); - testRequest = new SearchSourceBuilder(); - testRequest.size(size); - testRequest.query(new MatchAllQueryBuilder()); + if (evaluationRequest != null) { + int size = randomValueOtherThan(evaluationRequest.size(), () -> randomInt(Integer.MAX_VALUE)); + evaluationRequest = new SearchSourceBuilder(); + evaluationRequest.size(size); + evaluationRequest.query(new MatchAllQueryBuilder()); } else { if (randomBoolean()) { Map mutated = new HashMap<>(); @@ -204,10 +208,10 @@ private static RatedRequest mutateTestItem(RatedRequest original) { } RatedRequest ratedRequest; - if (testRequest == null) { + if (evaluationRequest == null) { ratedRequest = new RatedRequest(id, ratedDocs, params, templateId); } else { - ratedRequest = new RatedRequest(id, ratedDocs, testRequest); + ratedRequest = new RatedRequest(id, ratedDocs, evaluationRequest); } ratedRequest.addSummaryFields(summaryFields); @@ -258,6 +262,44 @@ public void testSettingTemplateIdNoParamsThrows() { expectThrows(IllegalArgumentException.class, () -> new RatedRequest("id", ratedDocs, null, "templateId")); } + public void testAggsNotAllowed() { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + SearchSourceBuilder query = new SearchSourceBuilder(); + 
query.aggregation(AggregationBuilders.terms("fieldName")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new RatedRequest("id", ratedDocs, query)); + assertEquals("Query in rated requests should not contain aggregations.", e.getMessage()); + } + + public void testSuggestionsNotAllowed() { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + SearchSourceBuilder query = new SearchSourceBuilder(); + query.suggest(new SuggestBuilder().addSuggestion("id", SuggestBuilders.completionSuggestion("fieldname"))); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new RatedRequest("id", ratedDocs, query)); + assertEquals("Query in rated requests should not contain a suggest section.", e.getMessage()); + } + + public void testHighlighterNotAllowed() { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + SearchSourceBuilder query = new SearchSourceBuilder(); + query.highlighter(new HighlightBuilder().field("field")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new RatedRequest("id", ratedDocs, query)); + assertEquals("Query in rated requests should not contain a highlighter section.", e.getMessage()); + } + + public void testExplainNotAllowed() { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().explain(true))); + assertEquals("Query in rated requests should not use explain.", e.getMessage()); + } + + public void testProfileNotAllowed() { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> new RatedRequest("id", ratedDocs, new SearchSourceBuilder().profile(true))); + assertEquals("Query in rated requests should not use profile.", e.getMessage()); + } + /** * test 
that modifying the order of index/docId to make sure it doesn't * matter for parsing xContent @@ -287,7 +329,7 @@ public void testParseFromXContent() throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, querySpecString)) { RatedRequest specification = RatedRequest.fromXContent(parser); assertEquals("my_qa_query", specification.getId()); - assertNotNull(specification.getTestRequest()); + assertNotNull(specification.getEvaluationRequest()); List ratedDocs = specification.getRatedDocs(); assertEquals(3, ratedDocs.size()); for (int i = 0; i < 3; i++) { diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 8231e15af200c..3c94f4ace7759 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -230,6 +230,11 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { fixtureSupported = true } +boolean legalPath = rootProject.rootDir.toString().contains(" ") == false +if (legalPath == false) { + fixtureSupported = false +} + // Always ignore HA integration tests in the normal integration test runner, they are included below as // part of their own HA-specific integration test tasks. integTestRunner.exclude('**/Ha*TestSuiteIT.class') @@ -248,7 +253,12 @@ if (fixtureSupported) { // Only include the HA integration tests for the HA test task integTestHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class']) } else { - logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") + if (legalPath) { + logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") + } else { + logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'") + } + // The normal integration test runner will just test that the plugin loads integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic' // HA fixture is unsupported. Don't run them. 
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index fa9cda06589c6..926cf0b2ad4af 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -100,14 +100,9 @@ public InputStream readBlob(String blobName) throws IOException { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - if (blobExists(blobName)) { - throw new FileAlreadyExistsException("blob [" + blobName + "] already exists, cannot overwrite"); - } store.execute((Operation) fileContext -> { Path blob = new Path(path, blobName); // we pass CREATE, which means it fails if a blob already exists. - // NOTE: this behavior differs from FSBlobContainer, which passes TRUNCATE_EXISTING - // that should be fixed there, no need to bring truncation into this, give the user an error. 
EnumSet flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); CreateOpts[] opts = {CreateOpts.bufferSize(bufferSize)}; try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { @@ -121,6 +116,8 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize) t // if true synchronous behavior is required" stream.hsync(); } + } catch (org.apache.hadoop.fs.FileAlreadyExistsException faee) { + throw new FileAlreadyExistsException(blob.toString(), null, faee.getMessage()); } return null; }); diff --git a/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java b/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java index 50860ddd87b21..0ad78ad0c7a7e 100644 --- a/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java +++ b/qa/smoke-test-rank-eval-with-mustache/src/test/java/org/elasticsearch/index/rankeval/SmokeMultipleTemplatesIT.java @@ -30,6 +30,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -106,6 +107,43 @@ public void testPrecisionAtRequest() throws IOException { assertEquals(0.9, response.getEvaluationResult(), Double.MIN_VALUE); } + public void testTemplateWithAggsFails() { + String template = "{ \"aggs\" : { \"avg_grade\" : { \"avg\" : { \"field\" : \"grade\" }}}}"; + assertTemplatedRequestFailures(template, "Query in rated requests should not contain aggregations."); + } + + public void testTemplateWithSuggestFails() { + String template = "{\"suggest\" : {\"my-suggestion\" : {\"text\" : \"Elastic\",\"term\" : {\"field\" : \"message\"}}}}"; + assertTemplatedRequestFailures(template, "Query in rated requests should not contain a suggest section."); + } + + public void 
testTemplateWithHighlighterFails() { + String template = "{\"highlight\" : { \"fields\" : {\"content\" : {}}}}"; + assertTemplatedRequestFailures(template, "Query in rated requests should not contain a highlighter section."); + } + + public void testTemplateWithProfileFails() { + String template = "{\"profile\" : \"true\" }"; + assertTemplatedRequestFailures(template, "Query in rated requests should not use profile."); + } + + public void testTemplateWithExplainFails() { + String template = "{\"explain\" : \"true\" }"; + assertTemplatedRequestFailures(template, "Query in rated requests should not use explain."); + } + + private static void assertTemplatedRequestFailures(String template, String expectedMessage) { + List ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1)); + RatedRequest ratedRequest = new RatedRequest("id", ratedDocs, Collections.singletonMap("param1", "value1"), "templateId"); + Collection templates = Collections.singletonList(new ScriptWithId("templateId", + new Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, template, Collections.emptyMap()))); + RankEvalSpec rankEvalSpec = new RankEvalSpec(Collections.singletonList(ratedRequest), new PrecisionAtK(), templates); + RankEvalRequest rankEvalRequest = new RankEvalRequest(rankEvalSpec, new String[] { "test" }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> client().execute(RankEvalAction.INSTANCE, rankEvalRequest).actionGet()); + assertEquals(expectedMessage, e.getMessage()); + } + private static List createRelevant(String... 
docs) { List relevant = new ArrayList<>(); for (String doc : docs) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yml index 3429de6ed455f..ed4f228ab3da7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yml @@ -79,39 +79,6 @@ - match: { detail.tokenfilters.0.name: "_anonymous_tokenfilter" } - match: { detail.tokenfilters.0.tokens.0.token: bar } ---- -"Synonym filter with tokenizer": - - skip: - version: " - 5.99.99" - reason: to support synonym same analysis chain were added in 6.0.0 - - do: - indices.create: - index: test_synonym - body: - settings: - index: - analysis: - tokenizer: - trigram: - type: nGram - min_gram: 3 - max_gram: 3 - filter: - synonym: - type: synonym - synonyms: ["kimchy => shay"] - - - do: - indices.analyze: - index: test_synonym - body: - tokenizer: trigram - filter: [synonym] - text: kimchy - - length: { tokens: 2 } - - match: { tokens.0.token: sha } - - match: { tokens.1.token: hay } - --- "Custom normalizer in request": - skip: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml index f53c88bcfca2e..060d11b2e7467 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yml @@ -1,5 +1,9 @@ --- "Shrink index via API": + - skip: + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send + features: "warnings" # creates an index with one document solely allocated on the master node # and shrinks it into a new index with a single shard # we don't do the relocation to a single node after the index is created @@ -62,6 +66,8 @@ 
body: settings: index.number_of_replicas: 0 + warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml index 13e52a2dc805f..f24cca9c5cb28 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/20_source_mapping.yml @@ -1,8 +1,9 @@ --- "Shrink index ignores target template mapping": - skip: - version: " - 5.99.99" - reason: bug fixed in 5.6.0 + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send + features: "warnings" - do: cluster.state: {} @@ -69,6 +70,8 @@ body: settings: index.number_of_replicas: 0 + warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml index 34757427e6983..7a8b2f88e8ba5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/30_copy_settings.yml @@ -2,7 +2,7 @@ "Copy settings during shrink index": - skip: version: " - 6.3.99" - reason: copy_settings did not exist prior to 6.4.0 + reason: expects warnings that pre-6.4.0 will not send features: "warnings" - do: @@ -47,8 +47,6 @@ settings: index.number_of_replicas: 0 index.merge.scheduler.max_thread_count: 2 - warnings: - - "parameter [copy_settings] is deprecated but was [true]" - do: cluster.health: @@ -64,20 +62,19 @@ - match: { 
copy-settings-target.settings.index.blocks.write: "true" } - match: { copy-settings-target.settings.index.routing.allocation.include._id: $master } - # now we do a actual shrink and do not copy settings + # now we do a actual shrink and do not copy settings (by default) - do: indices.shrink: index: "source" target: "no-copy-settings-target" wait_for_active_shards: 1 master_timeout: 10s - copy_settings: false body: settings: index.number_of_replicas: 0 index.merge.scheduler.max_thread_count: 2 warnings: - - "parameter [copy_settings] is deprecated but was [false]" + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -92,3 +89,16 @@ - match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } - is_false: no-copy-settings-target.settings.index.blocks.write - is_false: no-copy-settings-target.settings.index.routing.allocation.include._id + + # now we do a actual shrink and try to set no copy settings + - do: + catch: /illegal_argument_exception/ + indices.shrink: + index: "source" + target: "explicit-no-copy-settings-target" + wait_for_active_shards: 1 + master_timeout: 10s + copy_settings: false + body: + settings: + index.number_of_replicas: 0 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 7f0b294a69d2b..cbdc412395056 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -33,8 +33,9 @@ setup: --- "Split index via API": - skip: - version: " - 6.0.99" - reason: Added in 6.1.0 + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send + features: "warnings" # make it read-only - do: @@ -60,6 +61,8 @@ setup: settings: index.number_of_replicas: 0 
index.number_of_shards: 2 + warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -103,8 +106,9 @@ setup: --- "Create illegal split indices": - skip: - version: " - 6.0.99" - reason: Added in 6.1.0 + version: " - 6.3.99" + reason: expects warnings that pre-6.4.0 will not send + features: "warnings" # try to do an illegal split with number_of_routing_shards set - do: @@ -119,6 +123,8 @@ setup: index.number_of_replicas: 0 index.number_of_shards: 2 index.number_of_routing_shards: 4 + warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" # try to do an illegal split with illegal number_of_shards - do: @@ -132,3 +138,5 @@ setup: settings: index.number_of_replicas: 0 index.number_of_shards: 3 + warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 69b505097f2ec..4bac4bf5b0807 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ -1,12 +1,12 @@ --- "Split index ignores target template mapping": -# - skip: -# version: " - 6.0.99" -# reason: Added in 6.1.0 -# uncomment once AwaitsFix is resolved - skip: + # when re-enabling uncomment the below skips version: "all" reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" + # version: " - 6.3.99" + # reason: expects warnings that pre-6.4.0 will not send + features: "warnings" # create index - do: @@ -68,6 +68,8 @@ settings: index.number_of_shards: 2 index.number_of_replicas: 0 + 
warnings: + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml index 1d3e37aa7b05d..9e64b2b8130ad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/30_copy_settings.yml @@ -2,7 +2,7 @@ "Copy settings during split index": - skip: version: " - 6.3.99" - reason: copy_settings did not exist prior to 6.4.0 + reason: expects warnings that pre-6.4.0 will not send features: "warnings" - do: @@ -50,8 +50,6 @@ index.number_of_replicas: 0 index.number_of_shards: 2 index.merge.scheduler.max_thread_count: 2 - warnings: - - "parameter [copy_settings] is deprecated but was [true]" - do: cluster.health: @@ -67,21 +65,20 @@ - match: { copy-settings-target.settings.index.blocks.write: "true" } - match: { copy-settings-target.settings.index.routing.allocation.include._id: $master } - # now we do a actual shrink and do not copy settings + # now we do a actual shrink and do not copy settings (by default) - do: indices.split: index: "source" target: "no-copy-settings-target" wait_for_active_shards: 1 master_timeout: 10s - copy_settings: false body: settings: index.number_of_replicas: 0 index.number_of_shards: 2 index.merge.scheduler.max_thread_count: 2 warnings: - - "parameter [copy_settings] is deprecated but was [false]" + - "resize operations without copying settings is deprecated; set parameter [copy_settings] to [true] for future default behavior" - do: cluster.health: @@ -96,3 +93,15 @@ - match: { no-copy-settings-target.settings.index.merge.scheduler.max_thread_count: "2" } - is_false: no-copy-settings-target.settings.index.blocks.write - is_false: 
no-copy-settings-target.settings.index.routing.allocation.include._id + + - do: + catch: /illegal_argument_exception/ + indices.split: + index: "source" + target: "explicit-no-copy-settings-target" + wait_for_active_shards: 1 + master_timeout: 10s + copy_settings: false + body: + settings: + index.number_of_replicas: 0 diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index f53b5437f03c2..ca046c48accff 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -56,7 +56,7 @@ public class ResizeRequest extends AcknowledgedRequest implements private CreateIndexRequest targetIndexRequest; private String sourceIndex; private ResizeType type = ResizeType.SHRINK; - private boolean copySettings = false; + private Boolean copySettings; ResizeRequest() {} @@ -80,6 +80,7 @@ public ActionRequestValidationException validate() { if (type == ResizeType.SPLIT && IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.exists(targetIndexRequest.settings()) == false) { validationException = addValidationError("index.number_of_shards is required for split operations", validationException); } + assert copySettings == null || copySettings; return validationException; } @@ -98,10 +99,10 @@ public void readFrom(StreamInput in) throws IOException { } else { type = ResizeType.SHRINK; // BWC this used to be shrink only } - if (in.getVersion().onOrAfter(Version.V_6_4_0)) { - copySettings = in.readBoolean(); + if (in.getVersion().before(Version.V_6_4_0)) { + copySettings = null; } else { - copySettings = false; + copySettings = in.readOptionalBoolean(); } } @@ -113,8 +114,11 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(ResizeAction.COMPATIBILITY_VERSION)) { out.writeEnum(type); } - if 
(out.getVersion().onOrAfter(Version.V_6_4_0)) { - out.writeBoolean(copySettings); + // noinspection StatementWithEmptyBody + if (out.getVersion().before(Version.V_6_4_0)) { + + } else { + out.writeOptionalBoolean(copySettings); } } @@ -187,11 +191,14 @@ public ResizeType getResizeType() { return type; } - public void setCopySettings(final boolean copySettings) { + public void setCopySettings(final Boolean copySettings) { + if (copySettings != null && copySettings == false) { + throw new IllegalArgumentException("[copySettings] can not be explicitly set to [false]"); + } this.copySettings = copySettings; } - public boolean getCopySettings() { + public Boolean getCopySettings() { return copySettings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index af22f30091852..3ccba85502569 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -185,7 +185,7 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest(final Resi .waitForActiveShards(targetIndex.waitForActiveShards()) .recoverFrom(metaData.getIndex()) .resizeType(resizeRequest.getResizeType()) - .copySettings(resizeRequest.getCopySettings()); + .copySettings(resizeRequest.getCopySettings() == null ? 
false : resizeRequest.getCopySettings()); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index f960664306f08..a9600681d1605 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -123,9 +123,6 @@ public InputStream readBlob(String name) throws IOException { @Override public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - if (blobExists(blobName)) { - throw new FileAlreadyExistsException("blob [" + blobName + "] already exists, cannot overwrite"); - } final Path file = path.resolve(blobName); try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) { Streams.copy(inputStream, outputStream); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/KeywordTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/KeywordTokenizerFactory.java index a3707d9e44a0d..1d94cad150785 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/KeywordTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/KeywordTokenizerFactory.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenizerFactory; public class KeywordTokenizerFactory extends AbstractTokenizerFactory { diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index 34c29ab0f1890..a07b4186ed594 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -177,7 +177,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), GeoPolygonQueryBuilder.NAME); } - if (!GeoUtils.isValidLongitude(point.lat())) { + if (!GeoUtils.isValidLongitude(point.lon())) { throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), GeoPolygonQueryBuilder.NAME); } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 2d9e8e78b7768..1054744422638 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -39,11 +39,9 @@ import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.ChineseAnalyzerProvider; import org.elasticsearch.index.analysis.CjkAnalyzerProvider; -import org.elasticsearch.index.analysis.ClassicTokenizerFactory; import org.elasticsearch.index.analysis.CzechAnalyzerProvider; import org.elasticsearch.index.analysis.DanishAnalyzerProvider; import org.elasticsearch.index.analysis.DutchAnalyzerProvider; -import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; import org.elasticsearch.index.analysis.EnglishAnalyzerProvider; import org.elasticsearch.index.analysis.FingerprintAnalyzerProvider; import org.elasticsearch.index.analysis.FinnishAnalyzerProvider; @@ -60,14 +58,9 @@ import org.elasticsearch.index.analysis.KeywordAnalyzerProvider; import org.elasticsearch.index.analysis.KeywordTokenizerFactory; import org.elasticsearch.index.analysis.LatvianAnalyzerProvider; -import org.elasticsearch.index.analysis.LetterTokenizerFactory; import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider; -import 
org.elasticsearch.index.analysis.LowerCaseTokenizerFactory; -import org.elasticsearch.index.analysis.NGramTokenizerFactory; import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider; -import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory; import org.elasticsearch.index.analysis.PatternAnalyzerProvider; -import org.elasticsearch.index.analysis.PatternTokenizerFactory; import org.elasticsearch.index.analysis.PersianAnalyzerProvider; import org.elasticsearch.index.analysis.PortugueseAnalyzerProvider; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; @@ -88,13 +81,10 @@ import org.elasticsearch.index.analysis.StopTokenFilterFactory; import org.elasticsearch.index.analysis.SwedishAnalyzerProvider; import org.elasticsearch.index.analysis.ThaiAnalyzerProvider; -import org.elasticsearch.index.analysis.ThaiTokenizerFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.analysis.TurkishAnalyzerProvider; -import org.elasticsearch.index.analysis.UAX29URLEmailTokenizerFactory; import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider; -import org.elasticsearch.index.analysis.WhitespaceTokenizerFactory; import org.elasticsearch.plugins.AnalysisPlugin; import java.io.IOException; @@ -223,36 +213,19 @@ static Map setupPreConfiguredTokenizers(List> setupTokenizers(List plugins) { NamedRegistry> tokenizers = new NamedRegistry<>("tokenizer"); tokenizers.register("standard", StandardTokenizerFactory::new); - tokenizers.register("uax_url_email", UAX29URLEmailTokenizerFactory::new); - tokenizers.register("path_hierarchy", PathHierarchyTokenizerFactory::new); - tokenizers.register("PathHierarchy", PathHierarchyTokenizerFactory::new); tokenizers.register("keyword", KeywordTokenizerFactory::new); - tokenizers.register("letter", LetterTokenizerFactory::new); - tokenizers.register("lowercase", LowerCaseTokenizerFactory::new); - 
tokenizers.register("whitespace", WhitespaceTokenizerFactory::new); - tokenizers.register("nGram", NGramTokenizerFactory::new); - tokenizers.register("ngram", NGramTokenizerFactory::new); - tokenizers.register("edgeNGram", EdgeNGramTokenizerFactory::new); - tokenizers.register("edge_ngram", EdgeNGramTokenizerFactory::new); - tokenizers.register("pattern", PatternTokenizerFactory::new); - tokenizers.register("classic", ClassicTokenizerFactory::new); - tokenizers.register("thai", ThaiTokenizerFactory::new); tokenizers.extractAndRegister(plugins, AnalysisPlugin::getTokenizers); return tokenizers; } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java index 23e5e6795117a..6ccffd3a22fe2 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java @@ -19,18 +19,8 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.LetterTokenizer; -import org.apache.lucene.analysis.core.WhitespaceTokenizer; -import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; -import org.apache.lucene.analysis.ngram.NGramTokenizer; -import org.apache.lucene.analysis.path.PathHierarchyTokenizer; -import org.apache.lucene.analysis.pattern.PatternTokenizer; -import org.apache.lucene.analysis.standard.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; -import org.apache.lucene.analysis.th.ThaiTokenizer; import org.elasticsearch.Version; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -41,69 +31,6 @@ public enum PreBuiltTokenizers { protected Tokenizer 
create(Version version) { return new StandardTokenizer(); } - }, - - CLASSIC(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new ClassicTokenizer(); - } - }, - - UAX_URL_EMAIL(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new UAX29URLEmailTokenizer(); - } - }, - - PATH_HIERARCHY(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new PathHierarchyTokenizer(); - } - }, - - LETTER(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new LetterTokenizer(); - } - }, - - WHITESPACE(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new WhitespaceTokenizer(); - } - }, - - NGRAM(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new NGramTokenizer(); - } - }, - - EDGE_NGRAM(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); - } - }, - - PATTERN(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new PatternTokenizer(Regex.compile("\\W+", null), -1); - } - }, - - THAI(CachingStrategy.ONE) { - @Override - protected Tokenizer create(Version version) { - return new ThaiTokenizer(); - } } ; diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 553744e66ef04..52e0ac8ab860f 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -19,6 +19,7 @@ package org.elasticsearch.indices.flush; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.Assertions; import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -501,8 +502,18 @@ private InFlightOpsResponse performInFlightOps(InFlightOpsRequest request) { if (indexShard.routingEntry().primary() == false) { throw new IllegalStateException("[" + request.shardId() +"] expected a primary shard"); } + if (Assertions.ENABLED) { + if (logger.isTraceEnabled()) { + logger.trace("in flight operations {}, acquirers {}", indexShard.getActiveOperationsCount(), indexShard.getActiveOperations()); + } + } int opCount = indexShard.getActiveOperationsCount(); - logger.trace("{} in flight operations sampled at [{}]", request.shardId(), opCount); + // Need to snapshot the debug info twice as it's updated concurrently with the permit count. + if (Assertions.ENABLED) { + if (logger.isTraceEnabled()) { + logger.trace("in flight operations {}, acquirers {}", indexShard.getActiveOperationsCount(), indexShard.getActiveOperations()); + } + } return new InFlightOpsResponse(opCount); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 3d3749de26496..57da913ee57dd 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -950,6 +950,20 @@ protected void finalize(final List snapshots, final BlobStoreIndexShardSnapshots updatedSnapshots = new BlobStoreIndexShardSnapshots(snapshots); try { + // Delete temporary index files first, as we might otherwise fail in the next step creating the new index file if an earlier + // attempt to write an index file with this generation failed mid-way after creating the temporary file. 
+ for (final String blobName : blobs.keySet()) { + if (indexShardSnapshotsFormat.isTempBlobName(blobName)) { + try { + blobContainer.deleteBlobIgnoringIfNotExists(blobName); + } catch (IOException e) { + logger.warn(() -> new ParameterizedMessage("[{}][{}] failed to delete index blob [{}] during finalization", + snapshotId, shardId, blobName), e); + throw e; + } + } + } + // If we deleted all snapshots, we don't need to create a new index file if (snapshots.size() > 0) { indexShardSnapshotsFormat.writeAtomic(updatedSnapshots, blobContainer, indexGeneration); @@ -957,7 +971,7 @@ protected void finalize(final List snapshots, // Delete old index files for (final String blobName : blobs.keySet()) { - if (indexShardSnapshotsFormat.isTempBlobName(blobName) || blobName.startsWith(SNAPSHOT_INDEX_PREFIX)) { + if (blobName.startsWith(SNAPSHOT_INDEX_PREFIX)) { try { blobContainer.deleteBlobIgnoringIfNotExists(blobName); } catch (IOException e) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index e6c994a85c35d..bc5db552b9dd2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -48,17 +48,22 @@ public final RestChannelConsumer prepareRequest(final RestRequest request, final final ResizeRequest resizeRequest = new ResizeRequest(request.param("target"), request.param("index")); resizeRequest.setResizeType(getResizeType()); final String rawCopySettings = request.param("copy_settings"); - final boolean copySettings; + final Boolean copySettings; if (rawCopySettings == null) { copySettings = resizeRequest.getCopySettings(); + } else if (rawCopySettings.isEmpty()) { + copySettings = true; } else { - deprecationLogger.deprecated("parameter [copy_settings] is deprecated but was [" + rawCopySettings + 
"]"); - if (rawCopySettings.length() == 0) { - copySettings = true; - } else { - copySettings = Booleans.parseBoolean(rawCopySettings); + copySettings = Booleans.parseBoolean(rawCopySettings); + if (copySettings == false) { + throw new IllegalArgumentException("parameter [copy_settings] can not be explicitly set to [false]"); } } + if (copySettings == null) { + deprecationLogger.deprecated( + "resize operations without copying settings is deprecated; " + + "set parameter [copy_settings] to [true] for future default behavior"); + } resizeRequest.setCopySettings(copySettings); request.applyContentParser(resizeRequest::fromXContent); resizeRequest.timeout(request.paramAsTime("timeout", resizeRequest.timeout())); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 36ce65bc8f2be..34481b0f42662 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -283,7 +283,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( new AnalyzeRequest() - .tokenizer("whitespace") + .tokenizer("standard") .addTokenFilter("foobar") .text("the qu1ck brown fox"), AllFieldMapper.NAME, null, notGlobal ? 
indexAnalyzers : null, registry, environment)); @@ -296,7 +296,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( new AnalyzeRequest() - .tokenizer("whitespace") + .tokenizer("standard") .addTokenFilter("lowercase") .addCharFilter("foobar") .text("the qu1ck brown fox"), @@ -318,7 +318,7 @@ public void testUnknown() throws IOException { public void testNonPreBuildTokenFilter() throws IOException { AnalyzeRequest request = new AnalyzeRequest(); - request.tokenizer("whitespace"); + request.tokenizer("standard"); request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters() request.text("the quick brown fox"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index e4bb197f80ace..8443ac2bf2e3d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -76,6 +77,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public class ShrinkIndexIT extends ESIntegTestCase { @Override @@ -83,7 +85,6 @@ 
protected Collection> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public void testCreateShrinkIndexToN() { int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}}; int[] shardSplits = randomFrom(possibleShardSplits); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index 77ead591a01f2..ba595de5215a3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -31,12 +31,34 @@ import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.io.IOException; +import java.util.function.Consumer; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; public class ResizeRequestTests extends ESTestCase { + public void testCopySettingsValidation() { + runTestCopySettingsValidation(false, r -> { + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, r::get); + assertThat(e, hasToString(containsString("[copySettings] can not be explicitly set to [false]"))); + }); + + runTestCopySettingsValidation(null, r -> assertNull(r.get().getCopySettings())); + runTestCopySettingsValidation(true, r -> assertTrue(r.get().getCopySettings())); + } + + private void runTestCopySettingsValidation(final Boolean copySettings, final Consumer> consumer) { + consumer.accept(() -> { + final ResizeRequest request = new ResizeRequest(); + request.setCopySettings(copySettings); + return request; + }); 
+ } + public void testToXContent() throws IOException { { ResizeRequest request = new ResizeRequest("target", "source"); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 79cc13594e98a..c55e4851edbc2 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -188,7 +188,7 @@ public void testSimpleTermVectors() throws IOException { .addAlias(new Alias("alias")) .setSettings(Settings.builder() .put(indexSettings()) - .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") + .put("index.analysis.analyzer.tv_test.tokenizer", "standard") .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) @@ -260,7 +260,7 @@ public void testRandomSingleTermVectors() throws IOException { .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping) .setSettings(Settings.builder() - .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") + .put("index.analysis.analyzer.tv_test.tokenizer", "standard") .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) @@ -394,7 +394,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { .addMapping("type1", mapping) .setSettings(Settings.builder() .put(indexSettings()) - .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") + .put("index.analysis.analyzer.tv_test.tokenizer", "standard") .putList("index.analysis.analyzer.tv_test.filter", "lowercase"))); ensureGreen(); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java 
b/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java index 3ce7dc3cd2a46..0e8877701e4b9 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.termvectors; +import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.payloads.FloatEncoder; @@ -35,6 +36,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.AnalysisPlugin; @@ -93,6 +95,12 @@ public TokenStream create(TokenStream tokenStream) { }); } + @Override + public List getPreConfiguredTokenizers() { + return Collections.singletonList(PreConfiguredTokenizer.singleton("mock-whitespace", + () -> new MockTokenizer(MockTokenizer.WHITESPACE, false), null)); + } + // Based on DelimitedPayloadTokenFilter: final class MockPayloadTokenFilter extends TokenFilter { private final char delimiter; @@ -151,7 +159,7 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio .startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads") .field("analyzer", "payload_test").endObject().endObject().endObject().endObject(); Settings setting = Settings.builder() - .put("index.analysis.analyzer.payload_test.tokenizer", "whitespace") + .put("index.analysis.analyzer.payload_test.tokenizer", "mock-whitespace") .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload") 
.put("index.analysis.filter.my_delimited_payload.delimiter", delimiter) .put("index.analysis.filter.my_delimited_payload.encoding", encodingString) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 9698ab18c198b..f209f771ab089 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -137,6 +137,7 @@ public void tearDown() throws Exception { private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList; + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/28685") public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index b5fb281454010..4ca37638a2226 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -254,4 +254,38 @@ public void testIgnoreUnmapped() throws IOException { QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext())); assertThat(e.getMessage(), containsString("failed to find geo_point field [unmapped]")); } + + public void testPointValidation() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + QueryShardContext context = createShardContext(); + String queryInvalidLat = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + 
"\":{\n" + + " \"points\":[\n" + + " [-70, 140],\n" + + " [-80, 30],\n" + + " [-90, 20]\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + + QueryShardException e1 = expectThrows(QueryShardException.class, () -> parseQuery(queryInvalidLat).toQuery(context)); + assertThat(e1.getMessage(), containsString("illegal latitude value [140.0] for [geo_polygon]")); + + String queryInvalidLon = "{\n" + + " \"geo_polygon\":{\n" + + " \"" + GEO_POINT_FIELD_NAME + "\":{\n" + + " \"points\":[\n" + + " [-70, 40],\n" + + " [-80, 30],\n" + + " [-190, 20]\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + + QueryShardException e2 = expectThrows(QueryShardException.class, () -> parseQuery(queryInvalidLon).toQuery(context)); + assertThat(e2.getMessage(), containsString("illegal longitude value [-190.0] for [geo_polygon]")); + } } diff --git a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 9f214082d4b22..802761780a713 100644 --- a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -35,10 +35,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; - public class AnalyzeActionIT extends ESIntegTestCase { public void testSimpleAnalyzerTests() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); @@ -333,14 +331,14 @@ public void testCustomTokenFilterInRequest() throws Exception { AnalyzeResponse analyzeResponse = client().admin().indices() .prepareAnalyze() .setText("Foo buzz test") - .setTokenizer("whitespace") + .setTokenizer("standard") .addTokenFilter("lowercase") .addTokenFilter(stopFilterSettings) .setExplain(true) .get(); //tokenizer - 
assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("whitespace")); + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("standard")); assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(3)); assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("Foo")); assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); @@ -393,41 +391,6 @@ public void testCustomTokenFilterInRequest() throws Exception { assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getPositionLength(), equalTo(1)); } - public void testCustomTokenizerInRequest() throws Exception { - Map tokenizerSettings = new HashMap<>(); - tokenizerSettings.put("type", "nGram"); - tokenizerSettings.put("min_gram", 2); - tokenizerSettings.put("max_gram", 2); - - AnalyzeResponse analyzeResponse = client().admin().indices() - .prepareAnalyze() - .setText("good") - .setTokenizer(tokenizerSettings) - .setExplain(true) - .get(); - - //tokenizer - assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("_anonymous_tokenizer")); - assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(3)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("go")); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(2)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getPosition(), equalTo(0)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getPositionLength(), equalTo(1)); - - assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getTerm(), equalTo("oo")); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getStartOffset(), equalTo(1)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getEndOffset(), equalTo(3)); - 
assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getPosition(), equalTo(1)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getPositionLength(), equalTo(1)); - - assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getTerm(), equalTo("od")); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getStartOffset(), equalTo(2)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getEndOffset(), equalTo(4)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getPosition(), equalTo(2)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getPositionLength(), equalTo(1)); - } - public void testAnalyzeKeywordField() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "keyword", "type=keyword")); ensureGreen("test"); diff --git a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 27e1c1af2bb83..e31e605b6b10f 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -254,7 +254,7 @@ private String syncedFlushDescription(ShardsSyncedFlushResult result) { result.totalShards(), result.failed(), result.failureReason(), detail); } - @TestLogging("_root:DEBUG") + @TestLogging("_root:DEBUG,org.elasticsearch.indices.flush:TRACE") public void testSyncedFlushSkipOutOfSyncReplicas() throws Exception { internalCluster().ensureAtLeastNumDataNodes(between(2, 3)); final int numberOfReplicas = internalCluster().numDataNodes() - 1; @@ -296,7 +296,7 @@ public void testSyncedFlushSkipOutOfSyncReplicas() throws Exception { assertThat(fullResult.successfulShards(), equalTo(numberOfReplicas + 1)); } - @TestLogging("_root:DEBUG") + @TestLogging("_root:DEBUG,org.elasticsearch.indices.flush:TRACE") public void testDoNotRenewSyncedFlushWhenAllSealed() throws Exception { 
internalCluster().ensureAtLeastNumDataNodes(between(2, 3)); final int numberOfReplicas = internalCluster().numDataNodes() - 1; diff --git a/server/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index d30dab721663c..e213d9e5862c4 100644 --- a/server/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -711,7 +711,7 @@ public void testCombineTemplates() throws Exception{ " \"analysis\" : {\n" + " \"analyzer\" : {\n" + " \"custom_1\" : {\n" + - " \"tokenizer\" : \"whitespace\"\n" + + " \"tokenizer\" : \"standard\"\n" + " }\n" + " }\n" + " }\n" + diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java index 75071309458cc..2c30184ee4e35 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandlerTests.java @@ -20,15 +20,20 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; import java.util.Collections; +import java.util.Locale; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; import static org.mockito.Mockito.mock; public class RestResizeHandlerTests extends ESTestCase { @@ -36,27 +41,41 @@ public class 
RestResizeHandlerTests extends ESTestCase { public void testShrinkCopySettingsDeprecated() throws IOException { final RestResizeHandler.RestShrinkIndexAction handler = new RestResizeHandler.RestShrinkIndexAction(Settings.EMPTY, mock(RestController.class)); - final String copySettings = randomFrom("true", "false"); - final FakeRestRequest request = - new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) - .withParams(Collections.singletonMap("copy_settings", copySettings)) - .withPath("source/_shrink/target") - .build(); - handler.prepareRequest(request, mock(NodeClient.class)); - assertWarnings("parameter [copy_settings] is deprecated but was [" + copySettings + "]"); + for (final String copySettings : new String[]{null, "", "true", "false"}) { + runTestResizeCopySettingsDeprecated(handler, "shrink", copySettings); + } } public void testSplitCopySettingsDeprecated() throws IOException { final RestResizeHandler.RestSplitIndexAction handler = new RestResizeHandler.RestSplitIndexAction(Settings.EMPTY, mock(RestController.class)); - final String copySettings = randomFrom("true", "false"); - final FakeRestRequest request = + for (final String copySettings : new String[]{null, "", "true", "false"}) { + runTestResizeCopySettingsDeprecated(handler, "split", copySettings); + } + } + + private void runTestResizeCopySettingsDeprecated( + final RestResizeHandler handler, final String resizeOperation, final String copySettings) throws IOException { + final FakeRestRequest.Builder builder = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) .withParams(Collections.singletonMap("copy_settings", copySettings)) - .withPath("source/_split/target") - .build(); - handler.prepareRequest(request, mock(NodeClient.class)); - assertWarnings("parameter [copy_settings] is deprecated but was [" + copySettings + "]"); + .withPath(String.format(Locale.ROOT, "source/_%s/target", resizeOperation)); + if (copySettings != null) { + 
builder.withParams(Collections.singletonMap("copy_settings", copySettings)); + } + final FakeRestRequest request = builder.build(); + if ("false".equals(copySettings)) { + final IllegalArgumentException e = + expectThrows(IllegalArgumentException.class, () -> handler.prepareRequest(request, mock(NodeClient.class))); + assertThat(e, hasToString(containsString("parameter [copy_settings] can not be explicitly set to [false]"))); + } else { + handler.prepareRequest(request, mock(NodeClient.class)); + if (copySettings == null) { + assertWarnings( + "resize operations without copying settings is deprecated; " + + "set parameter [copy_settings] to [true] for future default behavior"); + } + } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 76646c62a5eb2..68dc7b87055c3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.aggregations.pipeline.moving.avg; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; @@ -45,7 +43,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -69,7 +66,6 @@ import static org.hamcrest.core.IsNull.nullValue; @ESIntegTestCase.SuiteScopeTestCase -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29456") public class MovAvgIT 
extends ESIntegTestCase { private static final String INTERVAL_FIELD = "l_value"; private static final String VALUE_FIELD = "v_value"; @@ -1307,7 +1303,7 @@ private void assertBucketContents(Histogram.Bucket actual, Double expectedCount, } else { assertThat("[value] movavg is null", valuesMovAvg, notNullValue()); assertEquals("[value] movavg does not match expected [" + valuesMovAvg.value() + " vs " + expectedValue + "]", - valuesMovAvg.value(), expectedValue, 0.1 * Math.abs(countMovAvg.value())); + valuesMovAvg.value(), expectedValue, 0.1 * Math.abs(valuesMovAvg.value())); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 7f61655a09273..9011b0b8dd69c 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -1359,7 +1359,7 @@ public void testCommonTermsTermVector() throws IOException { public void testPhrasePrefix() throws IOException { Builder builder = Settings.builder() .put(indexSettings()) - .put("index.analysis.analyzer.synonym.tokenizer", "whitespace") + .put("index.analysis.analyzer.synonym.tokenizer", "standard") .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") .put("index.analysis.filter.synonym.type", "synonym") .putList("index.analysis.filter.synonym.synonyms", "quick => fast"); @@ -2804,7 +2804,7 @@ public void testFiltersFunctionScoreQueryHighlight() throws Exception { public void testSynonyms() throws IOException { Builder builder = Settings.builder() .put(indexSettings()) - .put("index.analysis.analyzer.synonym.tokenizer", "whitespace") + .put("index.analysis.analyzer.synonym.tokenizer", "standard") .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") 
.put("index.analysis.filter.synonym.type", "synonym") .putList("index.analysis.filter.synonym.synonyms", "fast,quick"); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 58565b5f264b7..fe50aaf9b73d7 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -156,7 +156,7 @@ public void testRescorePhrase() throws Exception { public void testMoreDocs() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); + builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); @@ -234,7 +234,7 @@ public void testMoreDocs() throws Exception { // Tests a rescore window smaller than number of hits: public void testSmallRescoreWindow() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); + builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); @@ -306,7 +306,7 @@ public void testSmallRescoreWindow() throws Exception { // Tests a rescorer that penalizes the scores: public void testRescorerMadeScoresWorse() throws Exception { Builder builder = Settings.builder(); - builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace"); + 
builder.put("index.analysis.analyzer.synonym.tokenizer", "standard"); builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase"); builder.put("index.analysis.filter.synonym.type", "synonym"); builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street"); diff --git a/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 37ffda5f46a0f..82bfb9889f0f0 100644 --- a/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -82,7 +82,7 @@ public void init() throws Exception { .put("index.analysis.analyzer.perfect_match.tokenizer", "keyword") .put("index.analysis.analyzer.perfect_match.filter", "lowercase") .put("index.analysis.analyzer.category.type", "custom") - .put("index.analysis.analyzer.category.tokenizer", "whitespace") + .put("index.analysis.analyzer.category.tokenizer", "standard") .put("index.analysis.analyzer.category.filter", "lowercase") ); assertAcked(builder.addMapping("test", createMapping())); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index b2a7c045ddce9..78d14f8f62bac 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; @@ -351,7 +350,7 @@ public void 
testCommonTermsQueryStackedTokens() throws Exception { .put(SETTING_NUMBER_OF_SHARDS,1) .put("index.analysis.filter.syns.type","synonym") .putList("index.analysis.filter.syns.synonyms","quick,fast") - .put("index.analysis.analyzer.syns.tokenizer","whitespace") + .put("index.analysis.analyzer.syns.tokenizer","standard") .put("index.analysis.analyzer.syns.filter","syns") ) .addMapping("type1", "field1", "type=text,analyzer=syns", "field2", "type=text,analyzer=syns")); @@ -1799,56 +1798,6 @@ public void testSearchEmptyDoc() { assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } - // see #5120 - public void testNGramCopyField() { - CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder() - .put(indexSettings()) - .put(IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey(), 9) - .put("index.analysis.analyzer.my_ngram_analyzer.type", "custom") - .put("index.analysis.analyzer.my_ngram_analyzer.tokenizer", "my_ngram_tokenizer") - .put("index.analysis.tokenizer.my_ngram_tokenizer.type", "nGram") - .put("index.analysis.tokenizer.my_ngram_tokenizer.min_gram", "1") - .put("index.analysis.tokenizer.my_ngram_tokenizer.max_gram", "10") - .putList("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0])); - assertAcked(builder.addMapping("test", "origin", "type=text,copy_to=meta", "meta", "type=text,analyzer=my_ngram_analyzer")); - // we only have ngrams as the index analyzer so searches will get standard analyzer - - - client().prepareIndex("test", "test", "1").setSource("origin", "C.A1234.5678") - .setRefreshPolicy(IMMEDIATE) - .get(); - - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("meta", "1234")) - .get(); - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("meta", "1234.56")) - .get(); - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test") - .setQuery(termQuery("meta", 
"A1234")) - .get(); - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test") - .setQuery(termQuery("meta", "a1234")) - .get(); - assertHitCount(searchResponse, 0L); // it's upper case - - searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("meta", "A1234").analyzer("my_ngram_analyzer")) - .get(); // force ngram analyzer - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("meta", "a1234").analyzer("my_ngram_analyzer")) - .get(); // this one returns a hit since it's default operator is OR - assertHitCount(searchResponse, 1L); - } - public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedException { createIndex("test1"); indexRandom(true, client().prepareIndex("test1", "type1", "1").setSource("field", "Johnnie Walker Black Label"), diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index feb15044438ec..677cc4163ccf7 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -427,7 +427,7 @@ public void testSizeAndSort() throws Exception { public void testStopwordsOnlyPhraseSuggest() throws IOException { assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings( Settings.builder() - .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace") + .put("index.analysis.analyzer.stopwd.tokenizer", "standard") .putList("index.analysis.analyzer.stopwd.filter", "stop") )); ensureGreen(); diff --git a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_1.json b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_1.json index e079d64eb8fda..94b9fae143a25 100644 --- 
a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_1.json +++ b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_1.json @@ -1,19 +1,12 @@ { - "filtered": { - "query": { - "match_all": {} - }, - "filter": { - "geo_polygon": { - "location": { - "points": { - "points": [ - [-70, 40], - [-80, 30], - [-90, 20] - ] - } - } + "geo_polygon": { + "location": { + "points": { + "points": [ + [-70, 40], + [-80, 30], + [-90, 20] + ] } } } diff --git a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_2.json b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_2.json index 0955c260727df..a7363452c54bb 100644 --- a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_2.json +++ b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_2.json @@ -1,21 +1,13 @@ { - "filtered": { - "query": { - "match_all": {} - }, - "filter": { - "geo_polygon": { - "location": { - "points": [ - [-70, 40], - [-80, 30], - [-90, 20] - ], - "something_else": { + "geo_polygon": { + "location": { + "points": [ + [-70, 40], + [-80, 30], + [-90, 20] + ], + "something_else": { - } - - } } } } diff --git a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_3.json b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_3.json index 0ac2a7bbb3abc..eef8c1ca074d1 100644 --- a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_3.json +++ b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_3.json @@ -1,12 +1,5 @@ { - "filtered": { - "query": { - "match_all": {} - }, - "filter": { - "geo_polygon": { - "location": ["WRONG"] - } - } + "geo_polygon": { + "location": ["WRONG"] } } diff --git a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_4.json 
b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_4.json index 51f6ad0037ea6..b2a65825c36f6 100644 --- a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_4.json +++ b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_4.json @@ -1,19 +1,12 @@ { - "filtered": { - "query": { - "match_all": {} + "geo_polygon": { + "location": { + "points": [ + [-70, 40], + [-80, 30], + [-90, 20] + ] }, - "filter": { - "geo_polygon": { - "location": { - "points": [ - [-70, 40], - [-80, 30], - [-90, 20] - ] - }, - "bla": true - } - } + "bla": true } } diff --git a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_5.json b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_5.json index 6f058f551cf60..5287154af42cc 100644 --- a/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_5.json +++ b/server/src/test/resources/org/elasticsearch/index/query/geo_polygon_exception_5.json @@ -1,19 +1,12 @@ { - "filtered": { - "query": { - "match_all": {} + "geo_polygon": { + "location": { + "points": [ + [-70, 40], + [-80, 30], + [-90, 20] + ] }, - "filter": { - "geo_polygon": { - "location": { - "points": [ - [-70, 40], - [-80, 30], - [-90, 20] - ] - }, - "bla": ["array"] - } - } + "bla": ["array"] } } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index f6e5e6a85b4a5..055139eacf716 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -22,18 +22,10 @@ import org.apache.lucene.analysis.util.CharFilterFactory; import org.apache.lucene.analysis.util.TokenFilterFactory; import org.apache.lucene.analysis.util.TokenizerFactory; -import 
org.elasticsearch.Version; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.index.analysis.ClassicTokenizerFactory; -import org.elasticsearch.index.analysis.EdgeNGramTokenizerFactory; import org.elasticsearch.index.analysis.HunspellTokenFilterFactory; import org.elasticsearch.index.analysis.KeywordTokenizerFactory; -import org.elasticsearch.index.analysis.LetterTokenizerFactory; -import org.elasticsearch.index.analysis.LowerCaseTokenizerFactory; import org.elasticsearch.index.analysis.MultiTermAwareComponent; -import org.elasticsearch.index.analysis.NGramTokenizerFactory; -import org.elasticsearch.index.analysis.PathHierarchyTokenizerFactory; -import org.elasticsearch.index.analysis.PatternTokenizerFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; @@ -43,9 +35,6 @@ import org.elasticsearch.index.analysis.StopTokenFilterFactory; import org.elasticsearch.index.analysis.SynonymGraphTokenFilterFactory; import org.elasticsearch.index.analysis.SynonymTokenFilterFactory; -import org.elasticsearch.index.analysis.ThaiTokenizerFactory; -import org.elasticsearch.index.analysis.UAX29URLEmailTokenizerFactory; -import org.elasticsearch.index.analysis.WhitespaceTokenizerFactory; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.test.ESTestCase; @@ -88,20 +77,20 @@ private static String toCamelCase(String s) { static final Map> KNOWN_TOKENIZERS = new MapBuilder>() // exposed in ES - .put("classic", ClassicTokenizerFactory.class) - .put("edgengram", EdgeNGramTokenizerFactory.class) + .put("classic", MovedToAnalysisCommon.class) + .put("edgengram", MovedToAnalysisCommon.class) .put("keyword", KeywordTokenizerFactory.class) - .put("letter", LetterTokenizerFactory.class) - .put("lowercase", LowerCaseTokenizerFactory.class) - .put("ngram", NGramTokenizerFactory.class) - 
.put("pathhierarchy", PathHierarchyTokenizerFactory.class) - .put("pattern", PatternTokenizerFactory.class) + .put("letter", MovedToAnalysisCommon.class) + .put("lowercase", MovedToAnalysisCommon.class) + .put("ngram", MovedToAnalysisCommon.class) + .put("pathhierarchy", MovedToAnalysisCommon.class) + .put("pattern", MovedToAnalysisCommon.class) .put("simplepattern", MovedToAnalysisCommon.class) .put("simplepatternsplit", MovedToAnalysisCommon.class) .put("standard", StandardTokenizerFactory.class) - .put("thai", ThaiTokenizerFactory.class) - .put("uax29urlemail", UAX29URLEmailTokenizerFactory.class) - .put("whitespace", WhitespaceTokenizerFactory.class) + .put("thai", MovedToAnalysisCommon.class) + .put("uax29urlemail", MovedToAnalysisCommon.class) + .put("whitespace", MovedToAnalysisCommon.class) // this one "seems to mess up offsets". probably shouldn't be a tokenizer... .put("wikipedia", Void.class) @@ -294,23 +283,8 @@ protected Map> getPreConfiguredTokenizers() { Map> tokenizers = new HashMap<>(); // TODO drop this temporary shim when all the old style tokenizers have been migrated to new style for (PreBuiltTokenizers tokenizer : PreBuiltTokenizers.values()) { - final Class luceneFactoryClazz; - switch (tokenizer) { - case UAX_URL_EMAIL: - luceneFactoryClazz = org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class; - break; - case PATH_HIERARCHY: - luceneFactoryClazz = Void.class; - break; - default: - luceneFactoryClazz = null; - } - tokenizers.put(tokenizer.name().toLowerCase(Locale.ROOT), luceneFactoryClazz); + tokenizers.put(tokenizer.name().toLowerCase(Locale.ROOT), null); } - // TODO drop aliases once they are moved to module - tokenizers.put("nGram", tokenizers.get("ngram")); - tokenizers.put("edgeNGram", tokenizers.get("edge_ngram")); - tokenizers.put("PathHierarchy", tokenizers.get("path_hierarchy")); return tokenizers; } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 8aff12edc8a53..743be6d1bcb01 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.io.InputStream; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; import java.util.Arrays; import java.util.HashMap; @@ -149,7 +150,7 @@ public void testVerifyOverwriteFails() throws IOException { final BytesArray bytesArray = new BytesArray(data); writeBlob(container, blobName, bytesArray); // should not be able to overwrite existing blob - expectThrows(IOException.class, () -> writeBlob(container, blobName, bytesArray)); + expectThrows(FileAlreadyExistsException.class, () -> writeBlob(container, blobName, bytesArray)); container.deleteBlob(blobName); writeBlob(container, blobName, bytesArray); // after deleting the previous blob, we should be able to write to it again } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 7a278aeadaede..4717fc7c1ba31 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -519,6 +519,19 @@ public static byte randomByte() { return (byte) random().nextInt(); } + /** + * Helper method to create a byte array of a given length populated with random byte values + * + * @see #randomByte() + */ + public static byte[] randomByteArrayOfLength(int size) { + byte[] bytes = new byte[size]; + for (int i = 0; i < size; i++) { + bytes[i] = randomByte(); + } + return bytes; + } + public static short randomShort() { return (short) random().nextInt(); } diff --git a/x-pack/build.gradle b/x-pack/build.gradle index 
ea7ea86887b06..95a99ee664a05 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -57,12 +57,4 @@ subprojects { ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-watcher:${version}": xpackModule('watcher')] ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-ccr:${version}": xpackModule('ccr')] - bwcVersions.snapshotProjectNames.each { snapshotName -> - Version snapshot = bwcVersions.getSnapshotForProject(snapshotName) - if (snapshot != null && snapshot.onOrAfter("6.3.0")) { - String snapshotProject = ":x-pack:plugin:bwc:${snapshotName}" - project(snapshotProject).ext.bwcVersion = snapshot - ext.projectSubstitutions["org.elasticsearch.plugin:x-pack:${snapshot}"] = snapshotProject - } - } } diff --git a/x-pack/plugin/bwc/build.gradle b/x-pack/plugin/bwc/build.gradle deleted file mode 100644 index 757448e35cd12..0000000000000 --- a/x-pack/plugin/bwc/build.gradle +++ /dev/null @@ -1,226 +0,0 @@ -import org.apache.tools.ant.taskdefs.condition.Os -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.test.NodeInfo - -import static org.elasticsearch.gradle.BuildPlugin.getJavaHome - -/** - * Subdirectories of this project are dummy projects which does a local - * checkout of the appropriate version's branch, and builds a snapshot. This - * allows backcompat tests to test against the next unreleased versions - * without relying on snapshots. 
- */ - -subprojects { - - Version bwcVersion = bwcVersions.getSnapshotForProject(project.name) - if (bwcVersion == null) { - // this project wont do anything - return - } - - String bwcBranch - if (project.name == 'next-minor-snapshot') { - // this is always a .x series - bwcBranch = "${bwcVersion.major}.x" - } else { - bwcBranch = "${bwcVersion.major}.${bwcVersion.minor}" - } - - apply plugin: 'distribution' - // Not published so no need to assemble - tasks.remove(assemble) - build.dependsOn.remove('assemble') - - File esCheckoutDir = file("${buildDir}/bwc/checkout-es-${bwcBranch}") - /* Delay building the path as the path will not exist during configuration which will - * fail on Windows due to getting the short name requiring the path to already exist. - */ - Object esCheckoutPath = """${-> - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - esCheckoutDir.mkdirs() - NodeInfo.getShortPathName(esCheckoutDir.toString()) - } else { - esCheckoutDir.toString() - } - }""" - File xpackCheckoutDir = file("${esCheckoutDir}-extra/x-pack-elasticsearch") - Object xpackCheckoutPath = """${-> - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - xpackCheckoutDir.mkdirs() - NodeInfo.getShortPathName(xpackCheckoutDir.toString()) - } else { - xpackCheckoutDir.toString() - } - }""" - - final String remote = System.getProperty("tests.bwc.remote", "elastic") - - task createElasticsearchClone(type: LoggedExec) { - onlyIf { esCheckoutDir.exists() == false } - commandLine = ['git', 'clone', rootDir, esCheckoutPath] - } - - task createXPackClone(type: LoggedExec) { - onlyIf { xpackCheckoutDir.exists() == false } - commandLine = ['git', 'clone', xpackRootProject.projectDir, xpackCheckoutPath] - } - - // we use regular Exec here to ensure we always get output, regardless of logging level - task findElasticsearchRemote(type: Exec) { - dependsOn createElasticsearchClone - workingDir = esCheckoutDir - commandLine = ['git', 'remote', '-v'] - ignoreExitValue = true - ByteArrayOutputStream output = new 
ByteArrayOutputStream() - standardOutput = output - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - execResult.assertNormalExitValue() - } - project.ext.esRemoteExists = false - output.toString('UTF-8').eachLine { - if (it.contains("${remote}\t")) { - project.ext.esRemoteExists = true - } - } - } - } - - task findXPackRemote(type: Exec) { - dependsOn createXPackClone - workingDir = xpackCheckoutDir - commandLine = ['git', 'remote', '-v'] - ignoreExitValue = true - ByteArrayOutputStream output = new ByteArrayOutputStream() - standardOutput = output - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - execResult.assertNormalExitValue() - } - project.ext.xpackRemoteExists = false - output.toString('UTF-8').eachLine { - if (it.contains("${remote}\t")) { - project.ext.xpackRemoteExists = true - } - } - } - } - - task addElasticsearchRemote(type: LoggedExec) { - dependsOn findElasticsearchRemote - onlyIf { project.ext.esRemoteExists == false } - workingDir = esCheckoutDir - commandLine = ['git', 'remote', 'add', "${remote}", "git@github.com:${remote}/elasticsearch.git"] - } - - task addXPackRemote(type: LoggedExec) { - dependsOn findXPackRemote - onlyIf { project.ext.xpackRemoteExists == false } - workingDir = xpackCheckoutDir - commandLine = ['git', 'remote', 'add', "${remote}", "git@github.com:${remote}/x-pack-elasticsearch.git"] - } - - task fetchElasticsearchLatest(type: LoggedExec) { - dependsOn addElasticsearchRemote - workingDir = esCheckoutDir - commandLine = ['git', 'fetch', '--all'] - } - - task fetchXPackLatest(type: LoggedExec) { - dependsOn addXPackRemote - workingDir = xpackCheckoutDir - commandLine = ['git', 'fetch', '--all'] - } - - String esBuildMetadataKey = "bwc_refspec_${project.path.substring(1)}_elasticsearch" - task checkoutElasticsearchBwcBranch(type: LoggedExec) { - dependsOn fetchElasticsearchLatest - def String refspec 
= System.getProperty("tests.bwc.refspec", buildMetadata.get(esBuildMetadataKey, "${remote}/${bwcBranch}")) - workingDir = esCheckoutDir - commandLine = ['git', 'checkout', refspec] - } - - String xpackBuildMetadataKey = "bwc_refspec_${project.path.substring(1)}_xpack" - task checkoutXPackBwcBranch(type: LoggedExec) { - dependsOn fetchXPackLatest - def String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(xpackBuildMetadataKey, "${remote}/${bwcBranch}")) - workingDir = xpackCheckoutDir - commandLine = ['git', 'checkout', refspec] - } - - File esBuildMetadataFile = project.file("build/${project.name}_elasticsearch/build_metadata") - task writeElasticsearchBuildMetadata(type: LoggedExec) { - dependsOn checkoutElasticsearchBwcBranch - workingDir = esCheckoutDir - commandLine = ['git', 'rev-parse', 'HEAD'] - ignoreExitValue = true - ByteArrayOutputStream output = new ByteArrayOutputStream() - standardOutput = output - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - execResult.assertNormalExitValue() - } - project.mkdir(esBuildMetadataFile.parent) - esBuildMetadataFile.setText("${esBuildMetadataKey}=${output.toString('UTF-8')}", 'UTF-8') - } - } - - File xpackBuildMetadataFile = project.file("build/${project.name}_xpack/build_metadata") - task writeXPackBuildMetadata(type: LoggedExec) { - dependsOn checkoutXPackBwcBranch - workingDir = xpackCheckoutDir - commandLine = ['git', 'rev-parse', 'HEAD'] - ignoreExitValue = true - ByteArrayOutputStream output = new ByteArrayOutputStream() - standardOutput = output - doLast { - if (execResult.exitValue != 0) { - output.toString('UTF-8').eachLine { line -> logger.error(line) } - execResult.assertNormalExitValue() - } - project.mkdir(xpackBuildMetadataFile.parent) - xpackBuildMetadataFile.setText("${xpackBuildMetadataKey}=${output.toString('UTF-8')}", 'UTF-8') - } - } - - File bwcZip = 
file("${xpackCheckoutDir}/plugin/build/distributions/x-pack-${bwcVersion}.zip") - task buildBwcVersion(type: Exec) { - dependsOn checkoutXPackBwcBranch, checkoutElasticsearchBwcBranch, writeElasticsearchBuildMetadata, writeXPackBuildMetadata - workingDir = xpackCheckoutDir - if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { - // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds - environment('JAVA_HOME', getJavaHome(it, 8)) - } else if ("6.2".equals(bwcBranch)) { - environment('JAVA_HOME', getJavaHome(it, 9)) - } else { - environment('JAVA_HOME', project.compilerJavaHome) - } - if (Os.isFamily(Os.FAMILY_WINDOWS)) { - executable 'cmd' - args '/C', 'call', new File(xpackCheckoutDir, 'gradlew').toString() - } else { - executable new File(xpackCheckoutDir, 'gradlew').toString() - } - args ":x-pack-elasticsearch:plugin:assemble", "-Dbuild.snapshot=true" - final LogLevel logLevel = gradle.startParameter.logLevel - if ([LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG].contains(logLevel)) { - args "--${logLevel.name().toLowerCase(Locale.ENGLISH)}" - } - final String showStacktraceName = gradle.startParameter.showStacktrace.name() - assert ["INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL"].contains(showStacktraceName) - if (showStacktraceName.equals("ALWAYS")) { - args "--stacktrace" - } else if (showStacktraceName.equals("ALWAYS_FULL")) { - args "--full-stacktrace" - } - } - - artifacts { - 'default' file: bwcZip, name: 'x-pack', type: 'zip', builtBy: buildBwcVersion - } -} diff --git a/x-pack/plugin/bwc/maintenance-bugfix-snapshot/build.gradle b/x-pack/plugin/bwc/maintenance-bugfix-snapshot/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/bwc/next-bugfix-snapshot/build.gradle b/x-pack/plugin/bwc/next-bugfix-snapshot/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/bwc/next-minor-snapshot/build.gradle 
b/x-pack/plugin/bwc/next-minor-snapshot/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/bwc/staged-minor-snapshot/build.gradle b/x-pack/plugin/bwc/staged-minor-snapshot/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index f7998a52d496a..1fb387b0b6c2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -45,7 +45,7 @@ public PutJobAction.Response newResponse() { public static class Request extends AcknowledgedRequest implements ToXContentObject { public static UpdateJobAction.Request parseRequest(String jobId, XContentParser parser) { - JobUpdate update = JobUpdate.PARSER.apply(parser, null).setJobId(jobId).build(); + JobUpdate update = JobUpdate.EXTERNAL_PARSER.apply(parser, null).setJobId(jobId).build(); return new UpdateJobAction.Request(jobId, update); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java index 79663637f31a5..53603f4dde423 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java @@ -30,26 +30,34 @@ public class JobUpdate implements Writeable, ToXContentObject { public static final ParseField DETECTORS = new ParseField("detectors"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + // For internal updates + static final ConstructingObjectParser INTERNAL_PARSER = new ConstructingObjectParser<>( + 
"job_update", args -> new Builder((String) args[0])); + + // For parsing REST requests + public static final ConstructingObjectParser EXTERNAL_PARSER = new ConstructingObjectParser<>( "job_update", args -> new Builder((String) args[0])); static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); - PARSER.declareStringArray(Builder::setGroups, Job.GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); - PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.CONFIG_PARSER, Job.ANALYSIS_LIMITS); - PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL); - PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); - PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); - PARSER.declareString(Builder::setModelSnapshotId, Job.MODEL_SNAPSHOT_ID); - PARSER.declareLong(Builder::setEstablishedModelMemory, Job.ESTABLISHED_MODEL_MEMORY); - PARSER.declareString(Builder::setJobVersion, Job.JOB_VERSION); + for (ConstructingObjectParser parser : Arrays.asList(INTERNAL_PARSER, EXTERNAL_PARSER)) { + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); + parser.declareStringArray(Builder::setGroups, Job.GROUPS); + parser.declareStringOrNull(Builder::setDescription, 
Job.DESCRIPTION); + parser.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); + parser.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG); + parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.CONFIG_PARSER, Job.ANALYSIS_LIMITS); + parser.declareString((builder, val) -> builder.setBackgroundPersistInterval( + TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL); + parser.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); + parser.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); + parser.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); + parser.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); + parser.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); + } + // These fields should not be set by a REST request + INTERNAL_PARSER.declareString(Builder::setModelSnapshotId, Job.MODEL_SNAPSHOT_ID); + INTERNAL_PARSER.declareLong(Builder::setEstablishedModelMemory, Job.ESTABLISHED_MODEL_MEMORY); + INTERNAL_PARSER.declareString(Builder::setJobVersion, Job.JOB_VERSION); } private final String jobId; @@ -224,14 +232,14 @@ public Long getEstablishedModelMemory() { return establishedModelMemory; } - public boolean isAutodetectProcessUpdate() { - return modelPlotConfig != null || detectorUpdates != null; - } - public Version getJobVersion() { return jobVersion; } + public boolean isAutodetectProcessUpdate() { + return modelPlotConfig != null || detectorUpdates != null; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -332,7 +340,7 @@ public Set getUpdateFields() { /** * Updates {@code source} with the new values in this object returning a 
new {@link Job}. * - * @param source Source job to be updated + * @param source Source job to be updated * @param maxModelMemoryLimit The maximum model memory allowed * @return A new job equivalent to {@code source} updated. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityLifecycleServiceField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityLifecycleServiceField.java deleted file mode 100644 index 6af642a1065f2..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityLifecycleServiceField.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.security; - -public final class SecurityLifecycleServiceField { - public static final String SECURITY_TEMPLATE_NAME = "security-index-template"; - public static final String SECURITY_INDEX_NAME = ".security"; - - private SecurityLifecycleServiceField() {} -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java index 3b8ea2910d13d..6bd6228f2efe1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/TokenMetaData.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -74,13 +75,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; TokenMetaData that = (TokenMetaData)o; - return 
keys.equals(that.keys) && currentKeyHash.equals(that.currentKeyHash); + return keys.equals(that.keys) && Arrays.equals(currentKeyHash, that.currentKeyHash); } @Override public int hashCode() { int result = keys.hashCode(); - result = 31 * result + currentKeyHash.hashCode(); + result = 31 * result + Arrays.hashCode(currentKeyHash); return result; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java index 48c75bf2c6e4c..0078dd4a05822 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java @@ -26,6 +26,8 @@ public class JobUpdateTests extends AbstractSerializingTestCase { + private boolean useInternalParser = randomBoolean(); + @Override protected JobUpdate createTestInstance() { JobUpdate.Builder update = new JobUpdate.Builder(randomAlphaOfLength(4)); @@ -84,13 +86,13 @@ protected JobUpdate createTestInstance() { if (randomBoolean()) { update.setCustomSettings(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10))); } - if (randomBoolean()) { + if (useInternalParser && randomBoolean()) { update.setModelSnapshotId(randomAlphaOfLength(10)); } - if (randomBoolean()) { + if (useInternalParser && randomBoolean()) { update.setEstablishedModelMemory(randomNonNegativeLong()); } - if (randomBoolean()) { + if (useInternalParser && randomBoolean()) { update.setJobVersion(randomFrom(Version.CURRENT, Version.V_6_2_0, Version.V_6_1_0)); } @@ -104,7 +106,11 @@ protected Writeable.Reader instanceReader() { @Override protected JobUpdate doParseInstance(XContentParser parser) { - return JobUpdate.PARSER.apply(parser, null).build(); + if (useInternalParser) { + return JobUpdate.INTERNAL_PARSER.apply(parser, null).build(); + } else { + return 
JobUpdate.EXTERNAL_PARSER.apply(parser, null).build(); + } } public void testMergeWithJob() { @@ -141,7 +147,7 @@ public void testMergeWithJob() { JobUpdate update = updateBuilder.build(); Job.Builder jobBuilder = new Job.Builder("foo"); - jobBuilder.setGroups(Arrays.asList("group-1")); + jobBuilder.setGroups(Collections.singletonList("group-1")); Detector.Builder d1 = new Detector.Builder("info_content", "domain"); d1.setOverFieldName("mlcategory"); Detector.Builder d2 = new Detector.Builder("min", "field"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/TokenMetaDataTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/TokenMetaDataTests.java new file mode 100644 index 0000000000000..77f7c4dd3ad04 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/TokenMetaDataTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.authc; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class TokenMetaDataTests extends ESTestCase { + + public void testEqualsAndHashCode() { + final int numKeyAndTimestamps = scaledRandomIntBetween(1, 8); + final List keyAndTimestampList = generateKeyAndTimestampListOfSize(numKeyAndTimestamps); + final byte[] currentKeyHash = randomByteArrayOfLength(8); + final TokenMetaData original = new TokenMetaData(keyAndTimestampList, currentKeyHash); + + EqualsHashCodeTestUtils.checkEqualsAndHashCode(original, tokenMetaData -> { + final List copiedList = new ArrayList<>(keyAndTimestampList); + final byte[] copyKeyHash = Arrays.copyOf(currentKeyHash, currentKeyHash.length); + return new TokenMetaData(copiedList, copyKeyHash); + }, tokenMetaData -> { + final List modifiedList = generateKeyAndTimestampListOfSize(numKeyAndTimestamps); + return new TokenMetaData(modifiedList, currentKeyHash); + }); + + EqualsHashCodeTestUtils.checkEqualsAndHashCode(original, tokenMetaData -> { + BytesStreamOutput out = new BytesStreamOutput(); + tokenMetaData.writeTo(out); + return new TokenMetaData(out.bytes().streamInput()); + }, tokenMetaData -> { + final byte[] modifiedKeyHash = randomByteArrayOfLength(8); + return new TokenMetaData(keyAndTimestampList, modifiedKeyHash); + }); + } + + private List generateKeyAndTimestampListOfSize(int size) { + final List keyAndTimestampList = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + keyAndTimestampList.add( + new KeyAndTimestamp(new SecureString(randomAlphaOfLengthBetween(1, 12).toCharArray()), randomNonNegativeLong())); + } + return keyAndTimestampList; + } +} diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java index 75b7ea1e5934f..ec62901d65a6e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/StateProcessor.java @@ -91,10 +91,10 @@ private BytesReference splitAndPersist(String jobId, BytesReference bytesRef, in } void persist(String jobId, BytesReference bytes) throws IOException { - logger.trace("[{}] ES API CALL: bulk index", jobId); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(bytes, AnomalyDetectorsIndex.jobStateIndexName(), ElasticsearchMappings.DOC_TYPE, XContentType.JSON); if (bulkRequest.numberOfActions() > 0) { + logger.trace("[{}] Persisting job state document", jobId); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { client.bulk(bulkRequest).actionGet(); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 5243e86289e9f..b3489bd86b83c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -234,8 +234,8 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME; +import static 
org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_INDEX_FORMAT; public class Security extends Plugin implements ActionPlugin, IngestPlugin, NetworkPlugin, ClusterPlugin, DiscoveryPlugin, MapperPlugin, @@ -442,8 +442,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste components.add(realms); components.add(reservedRealm); - securityLifecycleService.securityIndex().addIndexHealthChangeListener(nativeRoleMappingStore::onSecurityIndexHealthChange); - securityLifecycleService.securityIndex().addIndexOutOfDateListener(nativeRoleMappingStore::onSecurityIndexOutOfDateChange); + securityLifecycleService.securityIndex().addIndexStateListener(nativeRoleMappingStore::onSecurityIndexStateChange); AuthenticationFailureHandler failureHandler = null; String extensionName = null; @@ -475,8 +474,7 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste } final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, rolesProviders, threadPool.getThreadContext(), getLicenseState()); - securityLifecycleService.securityIndex().addIndexHealthChangeListener(allRolesStore::onSecurityIndexHealthChange); - securityLifecycleService.securityIndex().addIndexOutOfDateListener(allRolesStore::onSecurityIndexOutOfDateChange); + securityLifecycleService.securityIndex().addIndexStateListener(allRolesStore::onSecurityIndexStateChange); // to keep things simple, just invalidate all cached entries on license change. 
this happens so rarely that the impact should be // minimal getLicenseState().addListener(allRolesStore::invalidateAll); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java index 7a05ff13d126d..d4ad757ff4cab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java @@ -27,11 +27,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.function.BiConsumer; -import java.util.function.Consumer; -import java.util.function.Predicate; - -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; /** * This class is used to provide a lifecycle for services that is based on the cluster's state @@ -51,8 +46,6 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust public static final String INTERNAL_SECURITY_INDEX = SecurityIndexManager.INTERNAL_SECURITY_INDEX; public static final String SECURITY_INDEX_NAME = ".security"; - private static final Version MIN_READ_VERSION = Version.V_5_0_0; - private final Settings settings; private final ThreadPool threadPool; private final IndexAuditTrail indexAuditTrail; @@ -127,36 +120,7 @@ private void close() { } } - public static boolean securityIndexMappingSufficientToRead(ClusterState clusterState, Logger logger) { - return checkMappingVersions(clusterState, logger, MIN_READ_VERSION::onOrBefore); - } - - static boolean securityIndexMappingUpToDate(ClusterState clusterState, Logger logger) { - return checkMappingVersions(clusterState, logger, Version.CURRENT::equals); - } - - private static boolean checkMappingVersions(ClusterState clusterState, Logger logger, Predicate versionPredicate) { - return 
SecurityIndexManager.checkIndexMappingVersionMatches(SECURITY_INDEX_NAME, clusterState, logger, versionPredicate); - } - public static List indexNames() { return Collections.unmodifiableList(Arrays.asList(SECURITY_INDEX_NAME, INTERNAL_SECURITY_INDEX)); } - - /** - * Is the move from {@code previousHealth} to {@code currentHealth} a move from an unhealthy ("RED") index state to a healthy - * ("non-RED") state. - */ - public static boolean isMoveFromRedToNonRed(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { - return (previousHealth == null || previousHealth.getStatus() == ClusterHealthStatus.RED) - && currentHealth != null && currentHealth.getStatus() != ClusterHealthStatus.RED; - } - - /** - * Is the move from {@code previousHealth} to {@code currentHealth} a move from index-exists to index-deleted - */ - public static boolean isIndexDeleted(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { - return previousHealth != null && currentHealth == null; - } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/BytesKey.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/BytesKey.java index 1534b78899f8b..0ead753a4461c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/BytesKey.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/BytesKey.java @@ -14,7 +14,7 @@ * Simple wrapper around bytes so that it can be used as a cache key. The hashCode is computed * once upon creation and cached. 
*/ -public class BytesKey { +public final class BytesKey { final byte[] bytes; private final int hashCode; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java index 1b4f90a1bdc67..6d897e3c64f1a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ExpiredTokenRemover.java @@ -22,7 +22,6 @@ import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import java.time.Instant; import java.time.temporal.ChronoUnit; @@ -31,6 +30,7 @@ import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** * Responsible for cleaning the invalidated tokens from the invalidated tokens index. 
@@ -50,7 +50,7 @@ final class ExpiredTokenRemover extends AbstractRunnable { @Override public void doRun() { - SearchRequest searchRequest = new SearchRequest(SecurityLifecycleServiceField.SECURITY_INDEX_NAME); + SearchRequest searchRequest = new SearchRequest(SECURITY_INDEX_NAME); DeleteByQueryRequest expiredDbq = new DeleteByQueryRequest(searchRequest); if (timeout != TimeValue.MINUS_ONE) { expiredDbq.setTimeout(timeout); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java index 6e97071cea994..b50264a73e949 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java @@ -96,7 +96,7 @@ public static Map getFactories(ThreadPool threadPool, Res map.put(FileRealmSettings.TYPE, config -> new FileRealm(config, resourceWatcherService)); map.put(NativeRealmSettings.TYPE, config -> { final NativeRealm nativeRealm = new NativeRealm(config, nativeUsersStore); - securityLifecycleService.securityIndex().addIndexHealthChangeListener(nativeRealm::onSecurityIndexHealthChange); + securityLifecycleService.securityIndex().addIndexStateListener(nativeRealm::onSecurityIndexStateChange); return nativeRealm; }); map.put(LdapRealmSettings.AD_TYPE, config -> new LdapRealm(LdapRealmSettings.AD_TYPE, config, sslService, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 7a43faa31c1e7..b1ae7a7506a1f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -68,7 +68,6 @@ import 
org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.ScrollHelper; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; @@ -118,6 +117,7 @@ import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** * Service responsible for the creation, validation, and other management of {@link UserToken} @@ -256,7 +256,7 @@ public void createUserToken(Authentication authentication, Authentication origin .endObject(); builder.endObject(); IndexRequest request = - client.prepareIndex(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) + client.prepareIndex(SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) .setOpType(OpType.CREATE) .setSource(builder) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -372,7 +372,7 @@ void decodeToken(String token, ActionListener listener) throws IOExce decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> lifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { final GetRequest getRequest = - client.prepareGet(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, + client.prepareGet(SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(tokenId)).request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, ActionListener.wrap(response -> { @@ -533,7 +533,7 @@ private void indexBwcInvalidation(UserToken userToken, ActionListener l 
listener.onFailure(invalidGrantException("failed to invalidate token")); } else { final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken); - IndexRequest indexRequest = client.prepareIndex(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) + IndexRequest indexRequest = client.prepareIndex(SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) .setOpType(OpType.CREATE) .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -577,7 +577,7 @@ private void indexInvalidation(String tokenDocId, Version version, ActionListene if (attemptCount.get() > 5) { listener.onFailure(invalidGrantException("failed to invalidate token")); } else { - UpdateRequest request = client.prepareUpdate(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, tokenDocId) + UpdateRequest request = client.prepareUpdate(SECURITY_INDEX_NAME, TYPE, tokenDocId) .setDoc(srcPrefix, Collections.singletonMap("invalidated", true)) .setVersion(documentVersion) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -609,7 +609,7 @@ private void indexInvalidation(String tokenDocId, Version version, ActionListene || isShardNotAvailableException(cause)) { attemptCount.incrementAndGet(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), + client.prepareGet(SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), ActionListener.wrap(getResult -> { if (getResult.isExists()) { Map source = getResult.getSource(); @@ -674,7 +674,7 @@ private void findTokenFromRefreshToken(String refreshToken, ActionListener 5) { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { - SearchRequest request = client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) 
.setQuery(QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("doc_type", "token")) .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) @@ -718,7 +718,7 @@ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionList if (attemptCount.getAndIncrement() > 5) { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { - GetRequest getRequest = client.prepareGet(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(); + GetRequest getRequest = client.prepareGet(SECURITY_INDEX_NAME, TYPE, tokenDocId).request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, ActionListener.wrap(response -> { if (response.isExists()) { @@ -739,7 +739,7 @@ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionList in.setVersion(authVersion); Authentication authentication = new Authentication(in); UpdateRequest updateRequest = - client.prepareUpdate(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, tokenDocId) + client.prepareUpdate(SECURITY_INDEX_NAME, TYPE, tokenDocId) .setVersion(response.getVersion()) .setDoc("refresh_token", Collections.singletonMap("refreshed", true)) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) @@ -854,7 +854,7 @@ public void findActiveTokensForRealm(String realmName, ActionListener { MultiGetRequest mGetRequest = client.prepareMultiGet() - .add(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) - .add(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) + .add(SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) + .add(SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) .request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, mGetRequest, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java index 6b8f9eb703db0..c9ccdbb75c0bb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java @@ -6,14 +6,16 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; + +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; /** * User/password realm that is backed by an Elasticsearch index @@ -37,12 +39,8 @@ protected void doAuthenticate(UsernamePasswordToken token, ActionListener user) { - char[] hash = users.get(username); + final char[] hash = users.get(username); if (hash == null) { return AuthenticationResult.notHandled(); } @@ -91,7 +91,7 @@ public AuthenticationResult verifyPassword(String username, SecureString passwor } public boolean userExists(String username) { - return users != null && users.containsKey(username); + return users.containsKey(username); } public static Path resolveFile(Environment env) { diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java index 1631fef60ea89..e17d8c5c7ecfa 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserRolesStore.java @@ -75,11 +75,8 @@ int entriesCount() { } public String[] roles(String username) { - if (userRoles == null) { - return Strings.EMPTY_ARRAY; - } - String[] roles = userRoles.get(username); - return roles == null ? Strings.EMPTY_ARRAY : userRoles.get(username); + final String[] roles = userRoles.get(username); + return roles == null ? Strings.EMPTY_ARRAY : roles; } public static Path resolveFile(Environment env) { @@ -160,11 +157,7 @@ public static Map parseFile(Path path, @Nullable Logger logger } for (String user : roleUsers) { - List roles = userToRoles.get(user); - if (roles == null) { - roles = new ArrayList<>(); - userToRoles.put(user, roles); - } + List roles = userToRoles.computeIfAbsent(user, k -> new ArrayList<>()); roles.add(role); } } @@ -185,11 +178,7 @@ public static void writeFile(Map userToRoles, Path path) { HashMap> roleToUsers = new HashMap<>(); for (Map.Entry entry : userToRoles.entrySet()) { for (String role : entry.getValue()) { - List users = roleToUsers.get(role); - if (users == null) { - users = new ArrayList<>(); - roleToUsers.put(role, users); - } + List users = roleToUsers.computeIfAbsent(role, k -> new ArrayList<>()); users.add(entry.getKey()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 3d084b79b0422..59d3a9d87442c 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.bytes.BytesReference; @@ -37,9 +38,9 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.security.SecurityLifecycleService; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; import java.io.InputStream; @@ -62,13 +63,13 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.isIndexDeleted; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.isMoveFromRedToNonRed; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; +import 
static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; /** * This store reads + writes {@link ExpressionRoleMapping role mappings} in an Elasticsearch - * {@link SecurityLifecycleServiceField#SECURITY_INDEX_NAME index}. + * {@link SecurityLifecycleService#SECURITY_INDEX_NAME index}. *
* The store is responsible for all read and write operations as well as * {@link #resolveRoles(UserData, ActionListener) resolving roles}. @@ -322,17 +323,13 @@ private void reportStats(ActionListener> listener, List void refreshRealms(ActionListener listener, Result result) { String[] realmNames = this.realmsToRefresh.toArray(new String[realmsToRefresh.size()]); final SecurityClient securityClient = new SecurityClient(client); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 1760b23e13b7d..36c0987f50de9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -39,7 +39,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; @@ -78,6 +77,7 @@ import static org.elasticsearch.xpack.core.security.SecurityField.setting; import static org.elasticsearch.xpack.core.security.support.Exceptions.authorizationError; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; public class AuthorizationService extends AbstractComponent { public static final Setting ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING = @@ -302,7 +302,7 @@ && isSuperuser(authentication.getUser()) == false) { // only the XPackUser is allowed to work with this index, but we should allow indices monitoring actions through for 
debugging // purposes. These monitor requests also sometimes resolve indices concretely and then requests them logger.debug("user [{}] attempted to directly perform [{}] against the security index [{}]", - authentication.getUser().principal(), action, SecurityLifecycleServiceField.SECURITY_INDEX_NAME); + authentication.getUser().principal(), action, SECURITY_INDEX_NAME); throw denial(authentication, action, request, permission.names()); } else { putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, indicesAccessControl); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index f57deed68a5a2..5a005d7445b36 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -35,6 +36,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; import java.util.Arrays; @@ -54,8 +56,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.SecurityField.setting; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.isIndexDeleted; -import static 
org.elasticsearch.xpack.security.SecurityLifecycleService.isMoveFromRedToNonRed; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed; /** * A composite roles store that combines built in roles, file-based roles, and index-based roles. Checks the built in roles first, then the @@ -324,17 +326,13 @@ public void usageStats(ActionListener> listener) { }, listener::onFailure)); } - public void onSecurityIndexHealthChange(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { - if (isMoveFromRedToNonRed(previousHealth, currentHealth) || isIndexDeleted(previousHealth, currentHealth)) { + public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) { + if (isMoveFromRedToNonRed(previousState, currentState) || isIndexDeleted(previousState, currentState) || + previousState.isIndexUpToDate != currentState.isIndexUpToDate) { invalidateAll(); } } - public void onSecurityIndexOutOfDateChange(boolean prevOutOfDate, boolean outOfDate) { - assert prevOutOfDate != outOfDate : "this method should only be called if the two values are different"; - invalidateAll(); - } - /** * A mutable class that can be used to represent the combination of one or more {@link IndicesPrivileges} */ diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 8eb7eaac6b50b..a4b465b4f520c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -37,7 +37,6 @@ import org.elasticsearch.license.XPackLicenseState; import 
org.elasticsearch.xpack.core.XPackClientActionPlugin; import org.elasticsearch.xpack.core.security.ScrollHelper; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; @@ -65,6 +64,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.security.SecurityField.setting; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ROLE_TYPE; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** * NativeRolesStore is a {@code RolesStore} that, instead of reading from a @@ -122,7 +122,7 @@ public void getRoleDescriptors(String[] names, final ActionListener supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { - SearchRequest request = client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) .setScroll(TimeValue.timeValueSeconds(10L)) .setQuery(query) .setSize(1000) @@ -141,7 +141,7 @@ public void deleteRole(final DeleteRoleRequest deleteRoleRequest, final ActionLi listener.onFailure(new UnsupportedOperationException("roles may not be deleted using a tribe node")); } else { securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - DeleteRequest request = client.prepareDelete(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, + DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(deleteRoleRequest.name())).request(); request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); 
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, @@ -185,7 +185,7 @@ void innerPutRole(final PutRoleRequest request, final RoleDescriptor role, final return; } executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareIndex(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) + client.prepareIndex(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role.getName())) .setSource(xContentBuilder) .setRefreshPolicy(request.getRefreshPolicy()) .request(), @@ -216,10 +216,10 @@ public void usageStats(ActionListener> listener) { securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareMultiSearch() - .add(client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .setSize(0)) - .add(client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .must(QueryBuilders.boolQuery() @@ -229,7 +229,7 @@ public void usageStats(ActionListener> listener) { .should(existsQuery("indices.fields")))) .setSize(0) .setTerminateAfter(1)) - .add(client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + .add(client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.boolQuery() .must(QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE)) .filter(existsQuery("indices.query"))) @@ -300,7 +300,7 @@ public void onFailure(Exception e) { private void executeGetRoleRequest(String role, ActionListener listener) { 
securityLifecycleService.securityIndex().prepareIndexIfNeededThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, + client.prepareGet(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role)).request(), listener, client::get)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index d00007490d9a3..4bcfb779b0d50 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -41,6 +42,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; @@ -52,8 +54,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_FORMAT_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME; +import static 
org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; /** * Manages the lifecycle of a single index, its template, mapping and and data upgrades/migrations. @@ -63,16 +64,15 @@ public class SecurityIndexManager extends AbstractComponent { public static final String INTERNAL_SECURITY_INDEX = ".security-" + IndexUpgradeCheckVersion.UPRADE_VERSION; public static final int INTERNAL_INDEX_FORMAT = 6; public static final String SECURITY_VERSION_STRING = "security-version"; - public static final String TEMPLATE_VERSION_PATTERN = - Pattern.quote("${security.template.version}"); + public static final String TEMPLATE_VERSION_PATTERN = Pattern.quote("${security.template.version}"); + public static final String SECURITY_TEMPLATE_NAME = "security-index-template"; private final String indexName; private final Client client; - private final List> indexHealthChangeListeners = new CopyOnWriteArrayList<>(); - private final List> indexOutOfDateListeners = new CopyOnWriteArrayList<>(); + private final List> stateChangeListeners = new CopyOnWriteArrayList<>(); - private volatile State indexState = new State(false, false, false, false, null); + private volatile State indexState = new State(false, false, false, false, null, null); public SecurityIndexManager(Settings settings, Client client, String indexName) { super(settings); @@ -107,81 +107,31 @@ public boolean isMappingUpToDate() { } /** - * Adds a listener which will be notified when the security index health changes. The previous and - * current health will be provided to the listener so that the listener can determine if any action - * needs to be taken. + * Add a listener for notifications on state changes to the configured index. + * + * The previous and current state are provided. */ - public void addIndexHealthChangeListener(BiConsumer listener) { - indexHealthChangeListeners.add(listener); - } - - /** - * Adds a listener which will be notified when the security index out of date value changes. 
The previous and - * current value will be provided to the listener so that the listener can determine if any action - * needs to be taken. - */ - public void addIndexOutOfDateListener(BiConsumer listener) { - indexOutOfDateListeners.add(listener); + public void addIndexStateListener(BiConsumer listener) { + stateChangeListeners.add(listener); } public void clusterChanged(ClusterChangedEvent event) { - final boolean previousUpToDate = this.indexState.isIndexUpToDate; - processClusterState(event.state()); - checkIndexHealthChange(event); - if (previousUpToDate != this.indexState.isIndexUpToDate) { - notifyIndexOutOfDateListeners(previousUpToDate, this.indexState.isIndexUpToDate); - } - } - - private void processClusterState(ClusterState clusterState) { - assert clusterState != null; - final IndexMetaData securityIndex = resolveConcreteIndex(indexName, clusterState.metaData()); - final boolean indexExists = securityIndex != null; + final State previousState = indexState; + final IndexMetaData indexMetaData = resolveConcreteIndex(indexName, event.state().metaData()); + final boolean indexExists = indexMetaData != null; final boolean isIndexUpToDate = indexExists == false || - INDEX_FORMAT_SETTING.get(securityIndex.getSettings()).intValue() == INTERNAL_INDEX_FORMAT; - final boolean indexAvailable = checkIndexAvailable(clusterState); - final boolean mappingIsUpToDate = indexExists == false || checkIndexMappingUpToDate(clusterState); - final Version mappingVersion = oldestIndexMappingVersion(clusterState); - this.indexState = new State(indexExists, isIndexUpToDate, indexAvailable, mappingIsUpToDate, mappingVersion); - } - - private void checkIndexHealthChange(ClusterChangedEvent event) { - final ClusterState state = event.state(); - final ClusterState previousState = event.previousState(); - final IndexMetaData indexMetaData = resolveConcreteIndex(indexName, state.metaData()); - final IndexMetaData previousIndexMetaData = resolveConcreteIndex(indexName, 
previousState.metaData()); - if (indexMetaData != null) { - final ClusterIndexHealth currentHealth = - new ClusterIndexHealth(indexMetaData, state.getRoutingTable().index(indexMetaData.getIndex())); - final ClusterIndexHealth previousHealth = previousIndexMetaData != null ? new ClusterIndexHealth(previousIndexMetaData, - previousState.getRoutingTable().index(previousIndexMetaData.getIndex())) : null; - - if (previousHealth == null || previousHealth.getStatus() != currentHealth.getStatus()) { - notifyIndexHealthChangeListeners(previousHealth, currentHealth); - } - } else if (previousIndexMetaData != null) { - final ClusterIndexHealth previousHealth = - new ClusterIndexHealth(previousIndexMetaData, previousState.getRoutingTable().index(previousIndexMetaData.getIndex())); - notifyIndexHealthChangeListeners(previousHealth, null); - } - } - - private void notifyIndexHealthChangeListeners(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { - for (BiConsumer consumer : indexHealthChangeListeners) { - try { - consumer.accept(previousHealth, currentHealth); - } catch (Exception e) { - logger.warn(new ParameterizedMessage("failed to notify listener [{}] of index health change", consumer), e); - } - } - } - - private void notifyIndexOutOfDateListeners(boolean previous, boolean current) { - for (BiConsumer consumer : indexOutOfDateListeners) { - try { - consumer.accept(previous, current); - } catch (Exception e) { - logger.warn(new ParameterizedMessage("failed to notify listener [{}] of index out of date change", consumer), e); + INDEX_FORMAT_SETTING.get(indexMetaData.getSettings()).intValue() == INTERNAL_INDEX_FORMAT; + final boolean indexAvailable = checkIndexAvailable(event.state()); + final boolean mappingIsUpToDate = indexExists == false || checkIndexMappingUpToDate(event.state()); + final Version mappingVersion = oldestIndexMappingVersion(event.state()); + final ClusterHealthStatus indexStatus = indexMetaData == null ? 
null : + new ClusterIndexHealth(indexMetaData, event.state().getRoutingTable().index(indexMetaData.getIndex())).getStatus(); + final State newState = new State(indexExists, isIndexUpToDate, indexAvailable, mappingIsUpToDate, mappingVersion, indexStatus); + this.indexState = newState; + + if (newState.equals(previousState) == false) { + for (BiConsumer listener : stateChangeListeners) { + listener.accept(previousState, newState); } } } @@ -195,7 +145,6 @@ private boolean checkIndexAvailable(ClusterState state) { return false; } - /** * Returns the routing-table for this index, or null if the index does not exist. */ @@ -351,23 +300,59 @@ private Tuple loadMappingAndSettingsSourceFromTemplate() { PutIndexTemplateRequest request = new PutIndexTemplateRequest(SECURITY_TEMPLATE_NAME).source(template, XContentType.JSON); return new Tuple<>(request.mappings().get("doc"), request.settings()); } + + /** + * Return true if the state moves from an unhealthy ("RED") index state to a healthy ("non-RED") state. + */ + public static boolean isMoveFromRedToNonRed(State previousState, State currentState) { + return (previousState.indexStatus == null || previousState.indexStatus == ClusterHealthStatus.RED) + && currentState.indexStatus != null && currentState.indexStatus != ClusterHealthStatus.RED; + } + + /** + * Return true if the state moves from the index existing to the index not existing. + */ + public static boolean isIndexDeleted(State previousState, State currentState) { + return previousState.indexStatus != null && currentState.indexStatus == null; + } + /** - * Holder class so we can update all values at once + * State of the security index. 
*/ - private static class State { - private final boolean indexExists; - private final boolean isIndexUpToDate; - private final boolean indexAvailable; - private final boolean mappingUpToDate; - private final Version mappingVersion; - - private State(boolean indexExists, boolean isIndexUpToDate, boolean indexAvailable, - boolean mappingUpToDate, Version mappingVersion) { + public static class State { + public final boolean indexExists; + public final boolean isIndexUpToDate; + public final boolean indexAvailable; + public final boolean mappingUpToDate; + public final Version mappingVersion; + public final ClusterHealthStatus indexStatus; + + public State(boolean indexExists, boolean isIndexUpToDate, boolean indexAvailable, + boolean mappingUpToDate, Version mappingVersion, ClusterHealthStatus indexStatus) { this.indexExists = indexExists; this.isIndexUpToDate = isIndexUpToDate; this.indexAvailable = indexAvailable; this.mappingUpToDate = mappingUpToDate; this.mappingVersion = mappingVersion; + this.indexStatus = indexStatus; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + State state = (State) o; + return indexExists == state.indexExists && + isIndexUpToDate == state.isIndexUpToDate && + indexAvailable == state.indexAvailable && + mappingUpToDate == state.mappingUpToDate && + Objects.equals(mappingVersion, state.mappingVersion) && + indexStatus == state.indexStatus; + } + + @Override + public int hashCode() { + return Objects.hash(indexExists, isIndexUpToDate, indexAvailable, mappingUpToDate, mappingVersion, indexStatus); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java index 0ddec86c1b5e8..54b8d72260ac7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/ClearRolesCacheTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.NativeRealmIntegTestCase; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; @@ -26,6 +25,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.NONE; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -57,7 +57,7 @@ public void setupForTests() { logger.debug("--> created role [{}]", role); } - ensureGreen(SecurityLifecycleServiceField.SECURITY_INDEX_NAME); + ensureGreen(SECURITY_INDEX_NAME); // warm up the caches on every node for (NativeRolesStore rolesStore : internalCluster().getInstances(NativeRolesStore.class)) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 1ea71217ca255..42c529d44b773 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -16,13 +16,13 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import 
org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.client.SecurityClient; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; @@ -66,7 +66,7 @@ protected Settings nodeSettings(int nodeOrdinal) { @Override public Set excludeTemplates() { Set templates = Sets.newHashSet(super.excludeTemplates()); - templates.add(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME); // don't remove the security index template + templates.add(SecurityIndexManager.SECURITY_TEMPLATE_NAME); // don't remove the security index template return templates; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 8946e6f8bcdde..e8dd50ac7330c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -7,6 +7,7 @@ import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -42,6 +43,7 @@ import org.elasticsearch.xpack.security.LocalStateSecurity; import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; 
@@ -61,10 +63,8 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.securityIndexMappingSufficientToRead; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; - +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsCollectionContaining.hasItem; @@ -481,7 +481,8 @@ public void assertSecurityIndexActive(TestCluster testCluster) throws Exception XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint().startObject(); assertTrue("security index mapping not sufficient to read:\n" + Strings.toString(clusterState.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject()), - securityIndexMappingSufficientToRead(clusterState, logger)); + SecurityIndexManager.checkIndexMappingVersionMatches(SECURITY_INDEX_NAME, clusterState, logger, + Version.CURRENT.minimumIndexCompatibilityVersion()::onOrBefore)); Index securityIndex = resolveSecurityIndex(clusterState.metaData()); if (securityIndex != null) { IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(securityIndex); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java deleted file mode 100644 index bf4cdbae1cd43..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java +++ /dev/null @@ -1,243 +0,0 @@ -/* - * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; - -import org.elasticsearch.Version; -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.FilterClient; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.MockTransportClient; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; -import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; -import org.elasticsearch.xpack.security.support.SecurityIndexManager; -import 
org.elasticsearch.xpack.security.test.SecurityTestUtils; -import org.elasticsearch.xpack.core.template.TemplateUtils; -import org.junit.After; -import org.junit.Before; - -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME; -import static org.elasticsearch.xpack.security.SecurityLifecycleService.securityIndexMappingUpToDate; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class SecurityLifecycleServiceTests extends ESTestCase { - private TransportClient transportClient; - private ThreadPool threadPool; - private SecurityLifecycleService securityLifecycleService; - private static final ClusterState EMPTY_CLUSTER_STATE = - new ClusterState.Builder(new ClusterName("test-cluster")).build(); - private CopyOnWriteArrayList listeners; - - @Before - public void setup() { - DiscoveryNode localNode = mock(DiscoveryNode.class); - when(localNode.getHostAddress()).thenReturn(buildNewFakeTransportAddress().toString()); - ClusterService clusterService = mock(ClusterService.class); - when(clusterService.localNode()).thenReturn(localNode); - - threadPool = new TestThreadPool("security template service tests"); - transportClient = new MockTransportClient(Settings.EMPTY); - Client client = new FilterClient(transportClient) { - @Override - protected > - void doExecute(Action action, Request request, - ActionListener listener) { - listeners.add(listener); - } - }; - securityLifecycleService = new SecurityLifecycleService(Settings.EMPTY, clusterService, - threadPool, client, mock(IndexAuditTrail.class)); - listeners = new CopyOnWriteArrayList<>(); - } - - @After - public void stop() throws InterruptedException { - if (transportClient != null) { - transportClient.close(); - } - terminate(threadPool); - } - - public void 
testIndexTemplateIsIdentifiedAsUpToDate() throws IOException { - ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate( - "/" + SECURITY_TEMPLATE_NAME + ".json" - ); - securityLifecycleService.clusterChanged(new ClusterChangedEvent("test-event", - clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); - // No upgrade actions run - assertThat(listeners.size(), equalTo(0)); - } - - public void testIndexTemplateVersionMatching() throws Exception { - String templateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate(templateString); - final ClusterState clusterState = clusterStateBuilder.build(); - - assertTrue(SecurityIndexManager.checkTemplateExistsAndVersionMatches( - SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME, clusterState, logger, - Version.V_5_0_0::before)); - assertFalse(SecurityIndexManager.checkTemplateExistsAndVersionMatches( - SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME, clusterState, logger, - Version.V_5_0_0::after)); - } - - public void testUpToDateMappingsAreIdentifiedAsUpToDate() throws IOException { - String securityTemplateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(securityTemplateString); - securityLifecycleService.clusterChanged(new ClusterChangedEvent("test-event", - clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); - assertThat(listeners.size(), equalTo(0)); - } - - public void testMappingVersionMatching() throws IOException { - String templateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(templateString); - securityLifecycleService.clusterChanged(new ClusterChangedEvent("test-event", - clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); - final SecurityIndexManager securityIndex = securityLifecycleService.securityIndex(); - 
assertTrue(securityIndex.checkMappingVersion(Version.V_5_0_0::before)); - assertFalse(securityIndex.checkMappingVersion(Version.V_5_0_0::after)); - } - - public void testMissingVersionMappingThrowsError() throws IOException { - String templateString = "/missing-version-" + SECURITY_TEMPLATE_NAME + ".json"; - ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(templateString); - final ClusterState clusterState = clusterStateBuilder.build(); - IllegalStateException exception = expectThrows(IllegalStateException.class, - () -> securityIndexMappingUpToDate(clusterState, logger)); - assertEquals("Cannot read security-version string in index " + SECURITY_INDEX_NAME, - exception.getMessage()); - } - - public void testMissingIndexIsIdentifiedAsUpToDate() throws IOException { - final ClusterName clusterName = new ClusterName("test-cluster"); - final ClusterState.Builder clusterStateBuilder = ClusterState.builder(clusterName); - String mappingString = "/" + SECURITY_TEMPLATE_NAME + ".json"; - IndexTemplateMetaData.Builder templateMeta = getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, mappingString); - MetaData.Builder builder = new MetaData.Builder(clusterStateBuilder.build().getMetaData()); - builder.put(templateMeta); - clusterStateBuilder.metaData(builder); - securityLifecycleService.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build() - , EMPTY_CLUSTER_STATE)); - assertThat(listeners.size(), equalTo(0)); - } - - private ClusterState.Builder createClusterStateWithMapping(String securityTemplateString) throws IOException { - final ClusterState clusterState = createClusterStateWithIndex(securityTemplateString).build(); - final String indexName = clusterState.metaData().getAliasAndIndexLookup() - .get(SECURITY_INDEX_NAME).getIndices().get(0).getIndex().getName(); - return ClusterState.builder(clusterState).routingTable(SecurityTestUtils.buildIndexRoutingTable(indexName)); - } - - private ClusterState.Builder 
createClusterStateWithMappingAndTemplate(String securityTemplateString) throws IOException { - ClusterState.Builder clusterStateBuilder = createClusterStateWithMapping(securityTemplateString); - MetaData.Builder metaDataBuilder = new MetaData.Builder(clusterStateBuilder.build().metaData()); - String securityMappingString = "/" + SECURITY_TEMPLATE_NAME + ".json"; - IndexTemplateMetaData.Builder securityTemplateMeta = getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, securityMappingString); - metaDataBuilder.put(securityTemplateMeta); - return clusterStateBuilder.metaData(metaDataBuilder); - } - - private static IndexMetaData.Builder createIndexMetadata(String indexName, String templateString) throws IOException { - String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - SecurityIndexManager.TEMPLATE_VERSION_PATTERN); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - request.source(template, XContentType.JSON); - IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); - indexMetaData.settings(Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .build()); - - for (Map.Entry entry : request.mappings().entrySet()) { - indexMetaData.putMapping(entry.getKey(), entry.getValue()); - } - return indexMetaData; - } - - public ClusterState.Builder createClusterStateWithTemplate(String securityTemplateString) throws IOException { - // add the correct mapping no matter what the template - ClusterState clusterState = createClusterStateWithIndex("/" + SECURITY_TEMPLATE_NAME + ".json").build(); - final MetaData.Builder metaDataBuilder = new MetaData.Builder(clusterState.metaData()); - metaDataBuilder.put(getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, securityTemplateString)); - return ClusterState.builder(clusterState).metaData(metaDataBuilder); - } - - private ClusterState.Builder 
createClusterStateWithIndex(String securityTemplate) throws IOException { - final MetaData.Builder metaDataBuilder = new MetaData.Builder(); - final boolean withAlias = randomBoolean(); - final String securityIndexName = SECURITY_INDEX_NAME + (withAlias ? "-" + randomAlphaOfLength(5) : ""); - metaDataBuilder.put(createIndexMetadata(securityIndexName, securityTemplate)); - - ClusterState.Builder clusterStateBuilder = ClusterState.builder(state()); - if (withAlias) { - // try with .security index as an alias - clusterStateBuilder.metaData(SecurityTestUtils.addAliasToMetaData(metaDataBuilder.build(), securityIndexName)); - } else { - // try with .security index as a concrete index - clusterStateBuilder.metaData(metaDataBuilder); - } - - clusterStateBuilder.routingTable(SecurityTestUtils.buildIndexRoutingTable(securityIndexName)); - return clusterStateBuilder; - } - - private static IndexTemplateMetaData.Builder getIndexTemplateMetaData( - String templateName, String templateString) throws IOException { - - String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), - SecurityIndexManager.TEMPLATE_VERSION_PATTERN); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - request.source(template, XContentType.JSON); - IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder(templateName) - .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))); - for (Map.Entry entry : request.mappings().entrySet()) { - templateBuilder.putMapping(entry.getKey(), entry.getValue()); - } - return templateBuilder; - } - - // cluster state where local node is master - private static ClusterState state() { - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); - discoBuilder.masterNodeId("1"); - discoBuilder.localNodeId("1"); - ClusterState.Builder state = ClusterState.builder(new ClusterName("test-cluster")); - state.nodes(discoBuilder); - 
state.metaData(MetaData.builder().generateClusterUuidIfNeeded()); - return state.build(); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index a1e8cc3c4e993..ec448f14e9160 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -43,7 +43,6 @@ import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -52,6 +51,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.LocalStateSecurity; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail.Message; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import org.joda.time.DateTime; @@ -254,7 +254,7 @@ public void afterTest() { @Override protected Set excludeTemplates() { - return Sets.newHashSet(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME, IndexAuditTrail.INDEX_TEMPLATE_NAME); + return Sets.newHashSet(SecurityIndexManager.SECURITY_TEMPLATE_NAME, IndexAuditTrail.INDEX_TEMPLATE_NAME); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java index 5b90b2e1e4609..7002803a3d49c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java @@ -18,9 +18,9 @@ import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; @@ -70,7 +70,7 @@ public Settings nodeSettings(int nodeOrdinal) { @Override protected Set excludeTemplates() { - return Sets.newHashSet(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME, IndexAuditTrail.INDEX_TEMPLATE_NAME); + return Sets.newHashSet(SecurityIndexManager.SECURITY_TEMPLATE_NAME, IndexAuditTrail.INDEX_TEMPLATE_NAME); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java index f0af7a2539e42..47eb1eabae159 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java @@ -47,10 +47,10 @@ public void testNativeRealmRegistersIndexHealthChangeListener() throws Exception Settings settings = Settings.builder().put("path.home", createTempDir()).build(); factories.get(NativeRealmSettings.TYPE).create(new 
RealmConfig("test", Settings.EMPTY, settings, TestEnvironment.newEnvironment(settings), new ThreadContext(settings))); - verify(securityIndex).addIndexHealthChangeListener(isA(BiConsumer.class)); + verify(securityIndex).addIndexStateListener(isA(BiConsumer.class)); factories.get(NativeRealmSettings.TYPE).create(new RealmConfig("test", Settings.EMPTY, settings, TestEnvironment.newEnvironment(settings), new ThreadContext(settings))); - verify(securityIndex, times(2)).addIndexHealthChangeListener(isA(BiConsumer.class)); + verify(securityIndex, times(2)).addIndexStateListener(isA(BiConsumer.class)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 9ed34b295bac1..aa6b9cab99467 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse; @@ -44,6 +43,7 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.equalTo; public class TokenAuthIntegTests extends SecurityIntegTestCase { @@ -146,7 +146,7 @@ public void 
testExpiredTokensDeletedAfterExpiration() throws Exception { assertTrue(invalidateResponse.isCreated()); AtomicReference docId = new AtomicReference<>(); assertBusy(() -> { - SearchResponse searchResponse = client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + SearchResponse searchResponse = client.prepareSearch(SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) .setSize(1) @@ -159,7 +159,7 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { // hack doc to modify the time to the day before Instant dayBefore = created.minus(1L, ChronoUnit.DAYS); assertTrue(Instant.now().isAfter(dayBefore)); - client.prepareUpdate(SecurityLifecycleServiceField.SECURITY_INDEX_NAME, "doc", docId.get()) + client.prepareUpdate(SECURITY_INDEX_NAME, "doc", docId.get()) .setDoc("expiration_time", dayBefore.toEpochMilli()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); @@ -177,8 +177,8 @@ public void testExpiredTokensDeletedAfterExpiration() throws Exception { assertEquals("token malformed", e.getMessage()); } } - client.admin().indices().prepareRefresh(SecurityLifecycleServiceField.SECURITY_INDEX_NAME).get(); - SearchResponse searchResponse = client.prepareSearch(SecurityLifecycleServiceField.SECURITY_INDEX_NAME) + client.admin().indices().prepareRefresh(SECURITY_INDEX_NAME).get(); + SearchResponse searchResponse = client.prepareSearch(SECURITY_INDEX_NAME) .setSource(SearchSourceBuilder.searchSource() .query(QueryBuilders.termQuery("doc_type", TokenService.INVALIDATED_TOKEN_DOC_TYPE))) .setSize(0) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 15b0355421c77..da9699e9ecfcb 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.core.security.authc.support.CharArrays; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.junit.BeforeClass; import java.nio.charset.StandardCharsets; @@ -24,6 +23,7 @@ import java.util.HashSet; import java.util.Set; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.is; /** @@ -78,7 +78,7 @@ public void testRetrieveUsers() throws Exception { addedUsers.add(uname); } logger.error("--> waiting for .security index"); - ensureGreen(SecurityLifecycleServiceField.SECURITY_INDEX_NAME); + ensureGreen(SECURITY_INDEX_NAME); MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); @@ -123,7 +123,7 @@ public void testRetrieveRoles() throws Exception { addedRoles.add(rname); } logger.error("--> waiting for .security index"); - ensureGreen(SecurityLifecycleServiceField.SECURITY_INDEX_NAME); + ensureGreen(SECURITY_INDEX_NAME); MockTerminal t = new MockTerminal(); String username = nodeClientUsername(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java index 8b64ad4b1ec56..7e2d5242101c1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java @@ -12,6 +12,7 @@ import 
org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.concurrent.atomic.AtomicInteger; @@ -20,6 +21,10 @@ public class NativeRealmTests extends ESTestCase { + private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { + return new SecurityIndexManager.State(true, true, true, true, null, indexStatus); + } + public void testCacheClearOnIndexHealthChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); int expectedInvalidation = 0; @@ -34,34 +39,34 @@ void clearCache() { }; // existing to no longer present - ClusterIndexHealth previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - ClusterIndexHealth currentHealth = null; - nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth); + SecurityIndexManager.State previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + SecurityIndexManager.State currentState = dummyState(null); + nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // doesn't exist to exists - previousHealth = null; - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(null); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green or yellow to red - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - 
nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(ClusterHealthStatus.RED); + nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); // red to non red - previousHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(ClusterHealthStatus.RED); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green to yellow or yellow to green - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth( - previousHealth.getStatus() == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); - nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? 
+ ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); + nativeRealm.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index ba7499e08d327..6fa9cb868e909 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.esnative.NativeUserStoreField; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -47,6 +46,7 @@ import java.util.function.Consumer; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; @@ -115,7 +115,7 @@ public void testBlankPasswordInIndexImpliesDefaultPassword() throws Exception { values.put(PASSWORD_FIELD, BLANK_PASSWORD); final GetResult result = new GetResult( - SecurityLifecycleServiceField.SECURITY_INDEX_NAME, + SECURITY_INDEX_NAME, NativeUserStoreField.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUserStoreField.RESERVED_USER_TYPE, randomAlphaOfLength(12)), 1L, @@ -184,7 +184,7 @@ public void 
testVerifyNonExistentUser() throws Exception { nativeUsersStore.verifyPassword(username, password, future); final GetResult getResult = new GetResult( - SecurityLifecycleServiceField.SECURITY_INDEX_NAME, + SECURITY_INDEX_NAME, NativeUserStoreField.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), 1L, @@ -225,7 +225,7 @@ private void respondToGetUserRequest(String username, SecureString password, Str values.put(User.Fields.TYPE.getPreferredName(), NativeUsersStore.USER_DOC_TYPE); final BytesReference source = BytesReference.bytes(jsonBuilder().map(values)); final GetResult getResult = new GetResult( - SecurityLifecycleServiceField.SECURITY_INDEX_NAME, + SECURITY_INDEX_NAME, NativeUserStoreField.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), 1L, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 3a67ab9447e32..693118c21bde5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; @@ -41,7 +40,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.security.test.SecurityTestUtils.getClusterIndexHealth; import static 
org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; @@ -128,6 +126,9 @@ private String randomiseDn(String dn) { return dn; } + private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) { + return new SecurityIndexManager.State(true, true, true, true, null, indexStatus); + } public void testCacheClearOnIndexHealthChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); @@ -135,34 +136,34 @@ public void testCacheClearOnIndexHealthChange() { int expectedInvalidation = 0; // existing to no longer present - ClusterIndexHealth previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - ClusterIndexHealth currentHealth = null; - store.onSecurityIndexHealthChange(previousHealth, currentHealth); + SecurityIndexManager.State previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + SecurityIndexManager.State currentState = dummyState(null); + store.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // doesn't exist to exists - previousHealth = null; - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - store.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(null); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + store.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green or yellow to red - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - store.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, 
ClusterHealthStatus.YELLOW)); + currentState = dummyState(ClusterHealthStatus.RED); + store.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); // red to non red - previousHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - store.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(ClusterHealthStatus.RED); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + store.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green to yellow or yellow to green - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth( - previousHealth.getStatus() == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); - store.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? 
+ ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); + store.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); } @@ -170,10 +171,14 @@ public void testCacheClearOnIndexOutOfDateChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); final NativeRoleMappingStore store = buildRoleMappingStoreForInvalidationTesting(numInvalidation); - store.onSecurityIndexOutOfDateChange(false, true); + store.onSecurityIndexStateChange( + new SecurityIndexManager.State(true, false, true, true, null, null), + new SecurityIndexManager.State(true, true, true, true, null, null)); assertEquals(1, numInvalidation.get()); - store.onSecurityIndexOutOfDateChange(true, false); + store.onSecurityIndexStateChange( + new SecurityIndexManager.State(true, true, true, true, null, null), + new SecurityIndexManager.State(true, false, true, true, null, null)); assertEquals(2, numInvalidation.get()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index ebf422c14218b..3013a7c41c2ac 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -140,7 +140,7 @@ import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationExceptionRunAs; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static 
org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index aa42220925138..b8f94c8134371 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -20,11 +20,11 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import java.util.List; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.containsInAnyOrder; public class AuthorizedIndicesTests extends ESTestCase { @@ -81,7 +81,7 @@ public void testSecurityIndicesAreRemovedFromRegularUser() { MetaData metaData = MetaData.builder() .put(new IndexMetaData.Builder("an-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .put(new IndexMetaData.Builder("another-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder(SecurityLifecycleServiceField.SECURITY_INDEX_NAME).settings(indexSettings) + .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME).settings(indexSettings) .numberOfShards(1).numberOfReplicas(0).build(), true) .build(); @@ -97,12 +97,12 @@ public void testSecurityIndicesAreNotRemovedFromSuperUsers() { MetaData metaData = MetaData.builder() .put(new 
IndexMetaData.Builder("an-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .put(new IndexMetaData.Builder("another-index").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) - .put(new IndexMetaData.Builder(SecurityLifecycleServiceField.SECURITY_INDEX_NAME).settings(indexSettings) + .put(new IndexMetaData.Builder(SECURITY_INDEX_NAME).settings(indexSettings) .numberOfShards(1).numberOfReplicas(0).build(), true) .build(); AuthorizedIndices authorizedIndices = new AuthorizedIndices(user, role, SearchAction.NAME, metaData); List list = authorizedIndices.get(); - assertThat(list, containsInAnyOrder("an-index", "another-index", SecurityLifecycleServiceField.SECURITY_INDEX_NAME)); + assertThat(list, containsInAnyOrder("an-index", "another-index", SECURITY_INDEX_NAME)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 0789d6de07079..d03389f5ddbbb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.graph.action.GraphExploreRequest; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; @@ -75,6 +74,7 @@ import java.util.Map; import java.util.Set; +import static 
org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -88,8 +88,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; - public class IndicesAndAliasesResolverTests extends ESTestCase { private User user; @@ -1200,14 +1198,14 @@ public void testXPackSecurityUserHasAccessToSecurityIndex() { { final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, hasItem(SecurityLifecycleServiceField.SECURITY_INDEX_NAME)); + assertThat(indices, hasItem(SECURITY_INDEX_NAME)); } { IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest(); aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, IndicesAliasesAction.NAME); List indices = resolveIndices(aliasesRequest, authorizedIndices).getLocal(); - assertThat(indices, hasItem(SecurityLifecycleServiceField.SECURITY_INDEX_NAME)); + assertThat(indices, hasItem(SECURITY_INDEX_NAME)); } } @@ -1215,7 +1213,7 @@ public void testXPackUserDoesNotHaveAccessToSecurityIndex() { SearchRequest request = new SearchRequest(); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleServiceField.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SECURITY_INDEX_NAME))); } public void testNonXPackUserAccessingSecurityIndex() { @@ -1227,7 +1225,7 @@ public 
void testNonXPackUserAccessingSecurityIndex() { SearchRequest request = new SearchRequest(); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleServiceField.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SECURITY_INDEX_NAME))); } { @@ -1235,7 +1233,7 @@ public void testNonXPackUserAccessingSecurityIndex() { aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias1").index("*")); final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, IndicesAliasesAction.NAME); List indices = resolveIndices(aliasesRequest, authorizedIndices).getLocal(); - assertThat(indices, not(hasItem(SecurityLifecycleServiceField.SECURITY_INDEX_NAME))); + assertThat(indices, not(hasItem(SECURITY_INDEX_NAME))); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 7c732cd7c52f9..ff9d93b3ba818 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; import java.util.Arrays; @@ -451,6 +452,10 @@ public void testCustomRolesProvidersLicensing() { assertEquals(0, role.indices().groups().length); } + private SecurityIndexManager.State dummyState(ClusterHealthStatus indexStatus) 
{ + return new SecurityIndexManager.State(true, true, true, true, null, indexStatus); + } + public void testCacheClearOnIndexHealthChange() { final AtomicInteger numInvalidation = new AtomicInteger(0); @@ -465,34 +470,34 @@ public void invalidateAll() { int expectedInvalidation = 0; // existing to no longer present - ClusterIndexHealth previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - ClusterIndexHealth currentHealth = null; - compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + SecurityIndexManager.State previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + SecurityIndexManager.State currentState = dummyState(null); + compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // doesn't exist to exists - previousHealth = null; - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(null); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green or yellow to red - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(ClusterHealthStatus.RED); + compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); // red to non red 
- previousHealth = getClusterIndexHealth(ClusterHealthStatus.RED); - currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(ClusterHealthStatus.RED); + currentState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(++expectedInvalidation, numInvalidation.get()); // green to yellow or yellow to green - previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); - currentHealth = getClusterIndexHealth( - previousHealth.getStatus() == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); - compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + previousState = dummyState(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentState = dummyState(previousState.indexStatus == ClusterHealthStatus.GREEN ? 
+ ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); + compositeRolesStore.onSecurityIndexStateChange(previousState, currentState); assertEquals(expectedInvalidation, numInvalidation.get()); } @@ -508,10 +513,14 @@ public void invalidateAll() { } }; - compositeRolesStore.onSecurityIndexOutOfDateChange(false, true); + compositeRolesStore.onSecurityIndexStateChange( + new SecurityIndexManager.State(true, false, true, true, null, null), + new SecurityIndexManager.State(true, true, true, true, null, null)); assertEquals(1, numInvalidation.get()); - compositeRolesStore.onSecurityIndexOutOfDateChange(true, false); + compositeRolesStore.onSecurityIndexStateChange( + new SecurityIndexManager.State(true, true, true, true, null, null), + new SecurityIndexManager.State(true, false, true, true, null, null)); assertEquals(2, numInvalidation.get()); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 9e9614a88b106..ab6664b53b0fb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -38,12 +38,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.audit.index.IndexAuditTrail; +import 
org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.test.SecurityTestUtils; import org.junit.After; import org.junit.Before; @@ -58,7 +58,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -260,7 +260,7 @@ private ClusterState getClusterStateWithSecurityIndex() { .build(); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(securityIndexName).settings(settings)) - .put(new IndexTemplateMetaData(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME, 0, 0, + .put(new IndexTemplateMetaData(SecurityIndexManager.SECURITY_TEMPLATE_NAME, 0, 0, Collections.singletonList(securityIndexName), Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of(), ImmutableOpenMap.of())) .build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index e85c8629f2c80..fe51f2beca34d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -52,15 +51,18 @@ import org.junit.Before; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_PATTERN; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class SecurityIndexManagerTests extends ESTestCase { - private static final ClusterName CLUSTER_NAME = new ClusterName("index-lifecycle-manager-tests"); + private static final ClusterName CLUSTER_NAME = new ClusterName("security-index-manager-tests"); private static final ClusterState EMPTY_CLUSTER_STATE = new ClusterState.Builder(CLUSTER_NAME).build(); - public static final String INDEX_NAME = "SecurityIndexManagerTests"; + public static final String INDEX_NAME = ".security"; private static final String TEMPLATE_NAME = "SecurityIndexManagerTests-template"; private SecurityIndexManager manager; private Map, Map>> actions; @@ -127,29 +129,14 @@ private ClusterChangedEvent event(ClusterState.Builder clusterStateBuilder) { public void testIndexHealthChangeListeners() throws Exception { final AtomicBoolean listenerCalled = new AtomicBoolean(false); - final AtomicReference previousHealth = new AtomicReference<>(); - final AtomicReference currentHealth = new AtomicReference<>(); - final BiConsumer listener = (prevState, state) -> { - previousHealth.set(prevState); - currentHealth.set(state); + final AtomicReference previousState = new AtomicReference<>(); + final AtomicReference currentState = new AtomicReference<>(); + final 
BiConsumer listener = (prevState, state) -> { + previousState.set(prevState); + currentState.set(state); listenerCalled.set(true); }; - - if (randomBoolean()) { - if (randomBoolean()) { - manager.addIndexHealthChangeListener(listener); - manager.addIndexHealthChangeListener((prevState, state) -> { - throw new RuntimeException("throw after listener"); - }); - } else { - manager.addIndexHealthChangeListener((prevState, state) -> { - throw new RuntimeException("throw before listener"); - }); - manager.addIndexHealthChangeListener(listener); - } - } else { - manager.addIndexHealthChangeListener(listener); - } + manager.addIndexStateListener(listener); // index doesn't exist and now exists final ClusterState.Builder clusterStateBuilder = createClusterState(INDEX_NAME, TEMPLATE_NAME); @@ -157,26 +144,26 @@ public void testIndexHealthChangeListeners() throws Exception { manager.clusterChanged(event(clusterStateBuilder)); assertTrue(listenerCalled.get()); - assertNull(previousHealth.get()); - assertEquals(ClusterHealthStatus.GREEN, currentHealth.get().getStatus()); + assertNull(previousState.get().indexStatus); + assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexStatus); // reset and call with no change to the index listenerCalled.set(false); - previousHealth.set(null); - currentHealth.set(null); + previousState.set(null); + currentState.set(null); ClusterChangedEvent event = new ClusterChangedEvent("same index health", clusterStateBuilder.build(), clusterStateBuilder.build()); manager.clusterChanged(event); assertFalse(listenerCalled.get()); - assertNull(previousHealth.get()); - assertNull(currentHealth.get()); + assertNull(previousState.get()); + assertNull(currentState.get()); // index with different health listenerCalled.set(false); - previousHealth.set(null); - currentHealth.set(null); - ClusterState previousState = clusterStateBuilder.build(); - Index prevIndex = previousState.getRoutingTable().index(INDEX_NAME).getIndex(); + previousState.set(null); + 
currentState.set(null); + ClusterState previousClusterState = clusterStateBuilder.build(); + Index prevIndex = previousClusterState.getRoutingTable().index(INDEX_NAME).getIndex(); clusterStateBuilder.routingTable(RoutingTable.builder() .add(IndexRoutingTable.builder(prevIndex) .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(prevIndex, 0)) @@ -189,29 +176,30 @@ public void testIndexHealthChangeListeners() throws Exception { - event = new ClusterChangedEvent("different index health", clusterStateBuilder.build(), previousState); + event = new ClusterChangedEvent("different index health", clusterStateBuilder.build(), previousClusterState); manager.clusterChanged(event); assertTrue(listenerCalled.get()); - assertEquals(ClusterHealthStatus.GREEN, previousHealth.get().getStatus()); - assertEquals(ClusterHealthStatus.RED, currentHealth.get().getStatus()); + assertEquals(ClusterHealthStatus.GREEN, previousState.get().indexStatus); + assertEquals(ClusterHealthStatus.RED, currentState.get().indexStatus); // swap prev and current listenerCalled.set(false); - previousHealth.set(null); - currentHealth.set(null); - event = new ClusterChangedEvent("different index health swapped", previousState, clusterStateBuilder.build()); + previousState.set(null); + currentState.set(null); + event = new ClusterChangedEvent("different index health swapped", previousClusterState, clusterStateBuilder.build()); manager.clusterChanged(event); assertTrue(listenerCalled.get()); - assertEquals(ClusterHealthStatus.RED, previousHealth.get().getStatus()); - assertEquals(ClusterHealthStatus.GREEN, currentHealth.get().getStatus()); + assertEquals(ClusterHealthStatus.RED, previousState.get().indexStatus); + assertEquals(ClusterHealthStatus.GREEN, currentState.get().indexStatus); } public void testIndexOutOfDateListeners() throws Exception { final AtomicBoolean listenerCalled = new AtomicBoolean(false); manager.clusterChanged(event(new ClusterState.Builder(CLUSTER_NAME))); - 
manager.addIndexOutOfDateListener((prev, current) -> { + AtomicBoolean upToDateChanged = new AtomicBoolean(); + manager.addIndexStateListener((prev, current) -> { listenerCalled.set(true); - assertNotEquals(prev, current); + upToDateChanged.set(prev.isIndexUpToDate != current.isIndexUpToDate); }); assertTrue(manager.isIndexUpToDate()); @@ -225,12 +213,14 @@ public void testIndexOutOfDateListeners() throws Exception { markShardsAvailable(clusterStateBuilder); manager.clusterChanged(event(clusterStateBuilder)); assertTrue(listenerCalled.get()); + assertTrue(upToDateChanged.get()); assertFalse(manager.isIndexUpToDate()); listenerCalled.set(false); assertFalse(listenerCalled.get()); manager.clusterChanged(event(new ClusterState.Builder(CLUSTER_NAME))); assertTrue(listenerCalled.get()); + assertTrue(upToDateChanged.get()); assertTrue(manager.isIndexUpToDate()); listenerCalled.set(false); @@ -238,7 +228,8 @@ public void testIndexOutOfDateListeners() throws Exception { clusterStateBuilder = createClusterState(INDEX_NAME, TEMPLATE_NAME, SecurityIndexManager.INTERNAL_INDEX_FORMAT); markShardsAvailable(clusterStateBuilder); manager.clusterChanged(event(clusterStateBuilder)); - assertFalse(listenerCalled.get()); + assertTrue(listenerCalled.get()); + assertFalse(upToDateChanged.get()); assertTrue(manager.isIndexUpToDate()); } @@ -324,4 +315,139 @@ private static String loadTemplate(String templateName) { final String resource = "/" + templateName + ".json"; return TemplateUtils.loadTemplate(resource, Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN); } + + public void testMappingVersionMatching() throws IOException { + String templateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; + ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(templateString); + manager.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); + 
assertTrue(manager.checkMappingVersion(Version.CURRENT.minimumIndexCompatibilityVersion()::before)); + assertFalse(manager.checkMappingVersion(Version.CURRENT.minimumIndexCompatibilityVersion()::after)); + } + + public void testMissingVersionMappingThrowsError() throws IOException { + String templateString = "/missing-version-" + SECURITY_TEMPLATE_NAME + ".json"; + ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(templateString); + final ClusterState clusterState = clusterStateBuilder.build(); + IllegalStateException exception = expectThrows(IllegalStateException.class, + () -> SecurityIndexManager.checkIndexMappingVersionMatches(SECURITY_INDEX_NAME, clusterState, logger, Version.CURRENT::equals)); + assertEquals("Cannot read security-version string in index " + SECURITY_INDEX_NAME, exception.getMessage()); + } + + public void testIndexTemplateIsIdentifiedAsUpToDate() throws IOException { + ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate( + "/" + SECURITY_TEMPLATE_NAME + ".json" + ); + manager.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); + // No upgrade actions run + assertThat(actions.size(), equalTo(0)); + } + + public void testIndexTemplateVersionMatching() throws Exception { + String templateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; + ClusterState.Builder clusterStateBuilder = createClusterStateWithTemplate(templateString); + final ClusterState clusterState = clusterStateBuilder.build(); + + assertTrue(SecurityIndexManager.checkTemplateExistsAndVersionMatches( + SecurityIndexManager.SECURITY_TEMPLATE_NAME, clusterState, logger, + Version.V_5_0_0::before)); + assertFalse(SecurityIndexManager.checkTemplateExistsAndVersionMatches( + SecurityIndexManager.SECURITY_TEMPLATE_NAME, clusterState, logger, + Version.V_5_0_0::after)); + } + + public void testUpToDateMappingsAreIdentifiedAsUpToDate() throws IOException { + String 
securityTemplateString = "/" + SECURITY_TEMPLATE_NAME + ".json"; + ClusterState.Builder clusterStateBuilder = createClusterStateWithMappingAndTemplate(securityTemplateString); + manager.clusterChanged(new ClusterChangedEvent("test-event", + clusterStateBuilder.build(), EMPTY_CLUSTER_STATE)); + assertThat(actions.size(), equalTo(0)); + } + + public void testMissingIndexIsIdentifiedAsUpToDate() throws IOException { + final ClusterName clusterName = new ClusterName("test-cluster"); + final ClusterState.Builder clusterStateBuilder = ClusterState.builder(clusterName); + String mappingString = "/" + SECURITY_TEMPLATE_NAME + ".json"; + IndexTemplateMetaData.Builder templateMeta = getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, mappingString); + MetaData.Builder builder = new MetaData.Builder(clusterStateBuilder.build().getMetaData()); + builder.put(templateMeta); + clusterStateBuilder.metaData(builder); + manager.clusterChanged(new ClusterChangedEvent("test-event", clusterStateBuilder.build() + , EMPTY_CLUSTER_STATE)); + assertThat(actions.size(), equalTo(0)); + } + + private ClusterState.Builder createClusterStateWithTemplate(String securityTemplateString) throws IOException { + // add the correct mapping no matter what the template + ClusterState clusterState = createClusterStateWithIndex("/" + SECURITY_TEMPLATE_NAME + ".json").build(); + final MetaData.Builder metaDataBuilder = new MetaData.Builder(clusterState.metaData()); + metaDataBuilder.put(getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, securityTemplateString)); + return ClusterState.builder(clusterState).metaData(metaDataBuilder); + } + + private ClusterState.Builder createClusterStateWithMapping(String securityTemplateString) throws IOException { + final ClusterState clusterState = createClusterStateWithIndex(securityTemplateString).build(); + final String indexName = clusterState.metaData().getAliasAndIndexLookup() + .get(SECURITY_INDEX_NAME).getIndices().get(0).getIndex().getName(); + return 
ClusterState.builder(clusterState).routingTable(SecurityTestUtils.buildIndexRoutingTable(indexName)); + } + + private ClusterState.Builder createClusterStateWithMappingAndTemplate(String securityTemplateString) throws IOException { + ClusterState.Builder clusterStateBuilder = createClusterStateWithMapping(securityTemplateString); + MetaData.Builder metaDataBuilder = new MetaData.Builder(clusterStateBuilder.build().metaData()); + String securityMappingString = "/" + SECURITY_TEMPLATE_NAME + ".json"; + IndexTemplateMetaData.Builder securityTemplateMeta = getIndexTemplateMetaData(SECURITY_TEMPLATE_NAME, securityMappingString); + metaDataBuilder.put(securityTemplateMeta); + return clusterStateBuilder.metaData(metaDataBuilder); + } + + private static IndexMetaData.Builder createIndexMetadata(String indexName, String templateString) throws IOException { + String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), + SecurityIndexManager.TEMPLATE_VERSION_PATTERN); + PutIndexTemplateRequest request = new PutIndexTemplateRequest(); + request.source(template, XContentType.JSON); + IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); + indexMetaData.settings(Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build()); + + for (Map.Entry entry : request.mappings().entrySet()) { + indexMetaData.putMapping(entry.getKey(), entry.getValue()); + } + return indexMetaData; + } + + private ClusterState.Builder createClusterStateWithIndex(String securityTemplate) throws IOException { + final MetaData.Builder metaDataBuilder = new MetaData.Builder(); + final boolean withAlias = randomBoolean(); + final String securityIndexName = SECURITY_INDEX_NAME + (withAlias ? 
"-" + randomAlphaOfLength(5) : ""); + metaDataBuilder.put(createIndexMetadata(securityIndexName, securityTemplate)); + + ClusterState.Builder clusterStateBuilder = ClusterState.builder(state()); + if (withAlias) { + // try with .security index as an alias + clusterStateBuilder.metaData(SecurityTestUtils.addAliasToMetaData(metaDataBuilder.build(), securityIndexName)); + } else { + // try with .security index as a concrete index + clusterStateBuilder.metaData(metaDataBuilder); + } + + clusterStateBuilder.routingTable(SecurityTestUtils.buildIndexRoutingTable(securityIndexName)); + return clusterStateBuilder; + } + + private static IndexTemplateMetaData.Builder getIndexTemplateMetaData(String templateName, String templateString) throws IOException { + + String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString(), + SecurityIndexManager.TEMPLATE_VERSION_PATTERN); + PutIndexTemplateRequest request = new PutIndexTemplateRequest(); + request.source(template, XContentType.JSON); + IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder(templateName) + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))); + for (Map.Entry entry : request.mappings().entrySet()) { + templateBuilder.putMapping(entry.getKey(), entry.getValue()); + } + return templateBuilder; + } } \ No newline at end of file diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java index b4871f90bd488..63c267eb816fc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java @@ -40,7 +40,7 @@ import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import static java.nio.file.StandardOpenOption.WRITE; import static 
org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; import static org.junit.Assert.assertEquals; public class SecurityTestUtils { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java index 5570aeebb94de..414d04e42323a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/XPackUserTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.index.IndexAuditTrailField; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.SecurityLifecycleService; @@ -20,6 +19,8 @@ import java.util.function.Predicate; +import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; + public class XPackUserTests extends ESTestCase { public void testXPackUserCanAccessNonSecurityIndices() { @@ -32,7 +33,7 @@ public void testXPackUserCanAccessNonSecurityIndices() { public void testXPackUserCannotAccessSecurityIndex() { final String action = randomFrom(GetAction.NAME, SearchAction.NAME, IndexAction.NAME); final Predicate predicate = XPackUser.ROLE.indices().allowedIndicesMatcher(action); - assertThat(predicate.test(SecurityLifecycleServiceField.SECURITY_INDEX_NAME), Matchers.is(false)); + assertThat(predicate.test(SECURITY_INDEX_NAME), Matchers.is(false)); 
assertThat(predicate.test(SecurityLifecycleService.INTERNAL_SECURITY_INDEX), Matchers.is(false)); } diff --git a/x-pack/plugin/security/src/test/resources/SecurityIndexManagerTests-template.json b/x-pack/plugin/security/src/test/resources/SecurityIndexManagerTests-template.json index d9a53e4622f5f..0957b1da7ec70 100644 --- a/x-pack/plugin/security/src/test/resources/SecurityIndexManagerTests-template.json +++ b/x-pack/plugin/security/src/test/resources/SecurityIndexManagerTests-template.json @@ -1,5 +1,5 @@ { - "index_patterns": "IndexLifeCycleManagerTests", + "index_patterns": ".security", "mappings": { "doc": { "_meta": { diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 95c9ade5e295d..c024af48187d3 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -25,8 +25,8 @@ public enum DataType { SHORT( JDBCType.SMALLINT, Short.class, Short.BYTES, 5, 6, true, false, true), INTEGER( JDBCType.INTEGER, Integer.class, Integer.BYTES, 10, 11, true, false, true), LONG( JDBCType.BIGINT, Long.class, Long.BYTES, 19, 20, true, false, true), - // 53 bits defaultPrecision ~ 16(15.95) decimal digits (53log10(2)), - DOUBLE( JDBCType.DOUBLE, Double.class, Double.BYTES, 16, 25, false, true, true), + // 53 bits defaultPrecision ~ 15(15.95) decimal digits (53log10(2)), + DOUBLE( JDBCType.DOUBLE, Double.class, Double.BYTES, 15, 25, false, true, true), // 24 bits defaultPrecision - 24*log10(2) =~ 7 (7.22) FLOAT( JDBCType.REAL, Float.class, Float.BYTES, 7, 15, false, true, true), HALF_FLOAT( JDBCType.FLOAT, Double.class, Double.BYTES, 16, 25, false, true, true), @@ -37,7 +37,10 @@ public enum DataType { OBJECT( JDBCType.STRUCT, null, -1, 0, 0), NESTED( JDBCType.STRUCT, null, -1, 0, 0), BINARY( 
JDBCType.VARBINARY, byte[].class, -1, Integer.MAX_VALUE, 0), - DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 19, 20); + // since ODBC and JDBC interpret precision for Date as display size, + // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) + // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288 + DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 24, 24); // @formatter:on private static final Map jdbcToEs; @@ -75,7 +78,7 @@ public enum DataType { *

* Specified column size. For numeric data, this is the maximum precision. For character * data, this is the length in characters. For datetime datatypes, this is the length in characters of the - * String representation (assuming the maximum allowed defaultPrecision of the fractional seconds component). + * String representation (assuming the maximum allowed defaultPrecision of the fractional milliseconds component). */ public final int defaultPrecision; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java index fb08d08fcb926..bf432a7236357 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java @@ -148,6 +148,7 @@ public Object visitSysCatalogs(SysCatalogsContext ctx) { @Override public SysTables visitSysTables(SysTablesContext ctx) { List types = new ArrayList<>(); + boolean legacyTableType = false; for (StringContext string : ctx.string()) { String value = string(string); if (value != null) { @@ -156,6 +157,12 @@ public SysTables visitSysTables(SysTablesContext ctx) { // since % is the same as not specifying a value, choose // https://docs.microsoft.com/en-us/sql/odbc/reference/develop-app/value-list-arguments?view=ssdt-18vs2017 // that is skip the value + } + // special case for legacy apps (like msquery) that always asks for 'TABLE' + // which we manually map to all concrete tables supported + else if (value.toUpperCase(Locale.ROOT).equals("TABLE")) { + legacyTableType = true; + types.add(IndexType.INDEX); } else { IndexType type = IndexType.from(value); types.add(type); @@ -165,7 +172,7 @@ public SysTables visitSysTables(SysTablesContext ctx) { // if the ODBC enumeration is specified, skip validation EnumSet set = types.isEmpty() ? 
null : EnumSet.copyOf(types); - return new SysTables(source(ctx), visitPattern(ctx.clusterPattern), visitPattern(ctx.tablePattern), set); + return new SysTables(source(ctx), visitPattern(ctx.clusterPattern), visitPattern(ctx.tablePattern), set, legacyTableType); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java index 3c01736cebe89..8005ce0758981 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumns.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.EsField; import java.sql.DatabaseMetaData; @@ -29,7 +30,6 @@ import static java.util.Arrays.asList; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; -import static org.elasticsearch.xpack.sql.type.DataType.NULL; import static org.elasticsearch.xpack.sql.type.DataType.SHORT; /** @@ -133,11 +133,7 @@ static void fillInRows(String clusterName, String indexName, Map output() { @Override public final void execute(SqlSession session, ActionListener listener) { listener.onResponse(Rows.of(output(), IndexType.VALID.stream() + // *DBC requires ascending order + .sorted(Comparator.comparing(t -> t.toSql())) .map(t -> singletonList(t.toSql())) .collect(toList()))); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java index 2b8e5e8527c31..eb6f6a36b5528 100644 --- 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTables.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command.sys; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.sql.analysis.index.IndexResolver.IndexInfo; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver.IndexType; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.regex.LikePattern; @@ -18,6 +19,7 @@ import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.ArrayList; +import java.util.Comparator; import java.util.EnumSet; import java.util.List; import java.util.Objects; @@ -33,17 +35,21 @@ public class SysTables extends Command { private final LikePattern pattern; private final LikePattern clusterPattern; private final EnumSet types; + // flag indicating whether tables are reported as `TABLE` or `BASE TABLE` + private final boolean legacyTableTypes; - public SysTables(Location location, LikePattern clusterPattern, LikePattern pattern, EnumSet types) { + public SysTables(Location location, LikePattern clusterPattern, LikePattern pattern, EnumSet types, + boolean legacyTableTypes) { super(location); this.clusterPattern = clusterPattern; this.pattern = pattern; this.types = types; + this.legacyTableTypes = legacyTableTypes; } @Override protected NodeInfo info() { - return NodeInfo.create(this, SysTables::new, clusterPattern, pattern, types); + return NodeInfo.create(this, SysTables::new, clusterPattern, pattern, types, legacyTableTypes); } @Override @@ -89,6 +95,8 @@ public final void execute(SqlSession session, ActionListener liste enumeration[3] = type.toSql(); values.add(asList(enumeration)); } + + values.sort(Comparator.comparing(l -> l.get(3).toString())); listener.onResponse(Rows.of(output(), values)); return; } @@ -108,10 
+116,13 @@ public final void execute(SqlSession session, ActionListener liste session.indexResolver().resolveNames(index, regex, types, ActionListener.wrap(result -> listener.onResponse( Rows.of(output(), result.stream() + // sort by type (which might be legacy), then by name + .sorted(Comparator. comparing(i -> legacyName(i.type())) + .thenComparing(Comparator.comparing(i -> i.name()))) .map(t -> asList(cluster, EMPTY, t.name(), - t.type().toSql(), + legacyName(t.type()), EMPTY, null, null, @@ -122,6 +133,10 @@ public final void execute(SqlSession session, ActionListener liste , listener::onFailure)); } + private String legacyName(IndexType indexType) { + return legacyTableTypes && indexType == IndexType.INDEX ? "TABLE" : indexType.toSql(); + } + @Override public int hashCode() { return Objects.hash(clusterPattern, pattern, types); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java index 508ffef530573..ab40b076fac85 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypes; import java.sql.DatabaseMetaData; import java.util.Comparator; @@ -67,9 +68,10 @@ public List output() { public final void execute(SqlSession session, ActionListener listener) { List> rows = Stream.of(DataType.values()) // sort by SQL int type (that's what the JDBC/ODBC specs want) - .sorted(Comparator.comparing(t -> t.jdbcType)) + .sorted(Comparator.comparing(t -> t.jdbcType.getVendorTypeNumber())) .map(t -> asList(t.esType.toUpperCase(Locale.ROOT), 
t.jdbcType.getVendorTypeNumber(), + //https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size?view=sql-server-2017 t.defaultPrecision, "'", "'", @@ -83,16 +85,17 @@ public final void execute(SqlSession session, ActionListener liste // only numerics are signed !t.isSigned(), //no fixed precision scale SQL_FALSE - false, - null, - null, - null, + Boolean.FALSE, + // not auto-incremented + Boolean.FALSE, null, + DataTypes.metaSqlMinimumScale(t), + DataTypes.metaSqlMaximumScale(t), // SQL_DATA_TYPE - ODBC wants this to be not null - 0, - null, + DataTypes.metaSqlDataType(t), + DataTypes.metaSqlDateTimeSub(t), // Radix - t.isInteger ? Integer.valueOf(10) : (t.isRational ? Integer.valueOf(2) : null), + DataTypes.metaSqlRadix(t), null )) .collect(toList()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index c0f4947bb88b3..c0bc9b6e52908 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -31,7 +31,7 @@ */ public abstract class DataTypeConversion { - private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC(); + private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); /** * Returns the type compatible with both left and right types diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index c2b40656ba294..6fc7f95bef71e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -51,4 +51,71 @@ public static DataType 
fromJava(Object value) { } throw new SqlIllegalArgumentException("No idea what's the DataType for {}", value.getClass()); } -} + + // + // Metadata methods, mainly for ODBC. + // As these are fairly obscure and limited in use, there is no point to promote them as full type methods + // hence why they appear here as utility methods. + // + + // https://docs.microsoft.com/en-us/sql/relational-databases/native-client-odbc-date-time/metadata-catalog + // https://github.com/elastic/elasticsearch/issues/30386 + public static Integer metaSqlDataType(DataType t) { + if (t == DataType.DATE) { + // ODBC SQL_DATETIME + return Integer.valueOf(9); + } + // this is safe since the vendor SQL types are short despite the return value + return t.jdbcType.getVendorTypeNumber(); + } + + // https://github.com/elastic/elasticsearch/issues/30386 + // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017 + public static Integer metaSqlDateTimeSub(DataType t) { + if (t == DataType.DATE) { + // ODBC SQL_CODE_TIMESTAMP + return Integer.valueOf(3); + } + // ODBC null + return 0; + } + + // https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits?view=sql-server-2017 + public static Short metaSqlMinimumScale(DataType t) { + // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) + if (t == DataType.DATE) { + return Short.valueOf((short) 3); + } + if (t.isInteger) { + return Short.valueOf((short) 0); + } + // minimum scale?
+ if (t.isRational) { + return Short.valueOf((short) 0); + } + return null; + } + + public static Short metaSqlMaximumScale(DataType t) { + // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) + if (t == DataType.DATE) { + return Short.valueOf((short) 3); + } + if (t.isInteger) { + return Short.valueOf((short) 0); + } + if (t.isRational) { + return Short.valueOf((short) t.defaultPrecision); + } + return null; + } + + // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017 + public static Integer metaSqlRadix(DataType t) { + // RADIX - Determines how numbers returned by COLUMN_SIZE and DECIMAL_DIGITS should be interpreted. + // 10 means they represent the number of decimal digits allowed for the column. + // 2 means they represent the number of bits allowed for the column. + // null means radix is not applicable for the given type. + return t.isInteger ? Integer.valueOf(10) : (t.isRational ? Integer.valueOf(2) : null); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java index b9737fbba608f..04926db5407f5 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java @@ -25,13 +25,6 @@ public DateEsField(String name, Map properties, boolean hasDocV this.formats = CollectionUtils.isEmpty(formats) ? 
DEFAULT_FORMAT : Arrays.asList(formats); } - @Override - public int getPrecision() { - // same as Long - // TODO: based this on format string - return 19; - } - public List getFormats() { return formats; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java index bddddc6941cbb..0b82530022386 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysColumnsTests.java @@ -38,6 +38,13 @@ public void testSysColumns() { assertEquals(null, radix(row)); assertEquals(Integer.MAX_VALUE, bufferLength(row)); + row = rows.get(4); + assertEquals("date", name(row)); + assertEquals(Types.TIMESTAMP, sqlType(row)); + assertEquals(null, radix(row)); + assertEquals(24, precision(row)); + assertEquals(8, bufferLength(row)); + row = rows.get(7); assertEquals("some.dotted", name(row)); assertEquals(Types.STRUCT, sqlType(row)); @@ -59,6 +66,10 @@ private static Object sqlType(List list) { return list.get(4); } + private static Object precision(List list) { + return list.get(6); + } + private static Object bufferLength(List list) { return list.get(7); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java index ac72bcba4d647..27ed27413110f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java @@ -57,8 +57,8 @@ private Tuple sql(String sql) { public void testSysTypes() throws Exception { Command cmd = sql("SYS TYPES").v1(); - 
List names = asList("BYTE", "SHORT", "INTEGER", "LONG", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", "KEYWORD", "TEXT", - "DATE", "BINARY", "NULL", "UNSUPPORTED", "OBJECT", "NESTED", "BOOLEAN"); + List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", + "KEYWORD", "TEXT", "BOOLEAN", "DATE", "UNSUPPORTED", "OBJECT", "NESTED"); cmd.execute(null, ActionListener.wrap(r -> { assertEquals(19, r.columnCount()); @@ -68,6 +68,8 @@ public void testSysTypes() throws Exception { assertFalse(r.column(9, Boolean.class)); // make sure precision is returned as boolean (not int) assertFalse(r.column(10, Boolean.class)); + // no auto-increment + assertFalse(r.column(11, Boolean.class)); for (int i = 0; i < r.size(); i++) { assertEquals(names.get(i), r.column(0)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java index 956273b9aae2d..291f9ee244e5f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTableTypesTests.java @@ -41,9 +41,9 @@ public void testSysCatalogs() throws Exception { sql.v1().execute(sql.v2(), ActionListener.wrap(r -> { assertEquals(2, r.size()); - assertEquals("BASE TABLE", r.column(0)); - r.advanceRow(); assertEquals("ALIAS", r.column(0)); + r.advanceRow(); + assertEquals("BASE TABLE", r.column(0)); }, ex -> fail(ex.getMessage()))); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java index fe36095641a60..c08c423be34eb 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; +import java.util.Comparator; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; @@ -57,30 +58,30 @@ public void testSysTablesDifferentCatalog() throws Exception { public void testSysTablesNoTypes() throws Exception { executeCommand("SYS TABLES", r -> { + assertEquals("alias", r.column(2)); + assertTrue(r.advanceRow()); assertEquals(2, r.size()); assertEquals("test", r.column(2)); - assertTrue(r.advanceRow()); - assertEquals("alias", r.column(2)); }, index, alias); } public void testSysTablesPattern() throws Exception { executeCommand("SYS TABLES LIKE '%'", r -> { + assertEquals("alias", r.column(2)); + assertTrue(r.advanceRow()); assertEquals(2, r.size()); assertEquals("test", r.column(2)); - assertTrue(r.advanceRow()); - assertEquals("alias", r.column(2)); }, index, alias); } public void testSysTablesPatternParameterized() throws Exception { List params = asList(param("%")); executeCommand("SYS TABLES LIKE ?", params, r -> { + assertEquals("alias", r.column(2)); + assertTrue(r.advanceRow()); assertEquals(2, r.size()); assertEquals("test", r.column(2)); - assertTrue(r.advanceRow()); - assertEquals("alias", r.column(2)); - }, index, alias); + }, alias, index); } public void testSysTablesOnlyAliases() throws Exception { @@ -105,6 +106,23 @@ public void testSysTablesOnlyIndices() throws Exception { }, index); } + public void testSysTablesOnlyIndicesInLegacyMode() throws Exception { + executeCommand("SYS TABLES LIKE 'test' TYPE 'TABLE'", r -> { + assertEquals(1, r.size()); + assertEquals("test", r.column(2)); + assertEquals("TABLE", r.column(3)); + }, index); + + } + + public void 
testSysTablesOnlyIndicesLegacyModeParameterized() throws Exception { + executeCommand("SYS TABLES LIKE 'test' TYPE ?", asList(param("TABLE")), r -> { + assertEquals(1, r.size()); + assertEquals("test", r.column(2)); + assertEquals("TABLE", r.column(3)); + }, index); + } + public void testSysTablesOnlyIndicesParameterized() throws Exception { executeCommand("SYS TABLES LIKE 'test' TYPE ?", asList(param("ALIAS")), r -> { assertEquals(1, r.size()); @@ -114,20 +132,32 @@ public void testSysTablesOnlyIndicesParameterized() throws Exception { public void testSysTablesOnlyIndicesAndAliases() throws Exception { executeCommand("SYS TABLES LIKE 'test' TYPE 'ALIAS', 'BASE TABLE'", r -> { + assertEquals("alias", r.column(2)); + assertTrue(r.advanceRow()); assertEquals(2, r.size()); assertEquals("test", r.column(2)); - assertTrue(r.advanceRow()); - assertEquals("alias", r.column(2)); }, index, alias); } public void testSysTablesOnlyIndicesAndAliasesParameterized() throws Exception { List params = asList(param("ALIAS"), param("BASE TABLE")); executeCommand("SYS TABLES LIKE 'test' TYPE ?, ?", params, r -> { + assertEquals("alias", r.column(2)); + assertTrue(r.advanceRow()); assertEquals(2, r.size()); assertEquals("test", r.column(2)); - assertTrue(r.advanceRow()); + }, index, alias); + } + + public void testSysTablesOnlyIndicesLegacyAndAliasesParameterized() throws Exception { + List params = asList(param("ALIAS"), param("TABLE")); + executeCommand("SYS TABLES LIKE 'test' TYPE ?, ?", params, r -> { assertEquals("alias", r.column(2)); + assertEquals("ALIAS", r.column(3)); + assertTrue(r.advanceRow()); + assertEquals(2, r.size()); + assertEquals("test", r.column(2)); + assertEquals("TABLE", r.column(3)); }, index, alias); } @@ -159,7 +189,7 @@ public void testSysTablesTypesEnumeration() throws Exception { executeCommand("SYS TABLES CATALOG LIKE '' LIKE '' TYPE '%'", r -> { assertEquals(2, r.size()); - Iterator it = IndexType.VALID.iterator(); + Iterator it = 
IndexType.VALID.stream().sorted(Comparator.comparing(IndexType::toSql)).iterator(); for (int t = 0; t < r.size(); t++) { assertEquals(it.next().toSql(), r.column(3)); @@ -180,7 +210,7 @@ public void testSysTablesTypesEnumerationWoString() throws Exception { executeCommand("SYS TABLES CATALOG LIKE '' LIKE '' ", r -> { assertEquals(2, r.size()); - Iterator it = IndexType.VALID.iterator(); + Iterator it = IndexType.VALID.stream().sorted(Comparator.comparing(IndexType::toSql)).iterator(); for (int t = 0; t < r.size(); t++) { assertEquals(it.next().toSql(), r.column(3)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index a6a322b31838f..8f5477f1951e9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -82,10 +82,15 @@ public void testConversionToDate() { Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, to); assertNull(conversion.convert(null)); - // TODO we'd like to be able to optionally parse millis here I think.... 
assertEquals(new DateTime(1000L, DateTimeZone.UTC), conversion.convert("1970-01-01T00:00:01Z")); assertEquals(new DateTime(1483228800000L, DateTimeZone.UTC), conversion.convert("2017-01-01T00:00:00Z")); assertEquals(new DateTime(18000000L, DateTimeZone.UTC), conversion.convert("1970-01-01T00:00:00-05:00")); + + // double check back and forth conversion + DateTime dt = DateTime.now(DateTimeZone.UTC); + Conversion forward = DataTypeConversion.conversionFor(DataType.DATE, DataType.KEYWORD); + Conversion back = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.DATE); + assertEquals(dt, back.convert(forward.convert(dt))); Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [Date]:Invalid format: \"0xff\" is malformed at \"xff\"", e.getMessage()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java new file mode 100644 index 0000000000000..0a34c697bdf64 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.type; + +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; +import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.sql.type.DataType.LONG; +import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDataType; +import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlDateTimeSub; +import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlMaximumScale; +import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlMinimumScale; +import static org.elasticsearch.xpack.sql.type.DataTypes.metaSqlRadix; + +public class DataTypesTests extends ESTestCase { + + public void testMetaDataType() { + assertEquals(Integer.valueOf(9), metaSqlDataType(DATE)); + DataType t = randomDataTypeNoDate(); + assertEquals(t.jdbcType.getVendorTypeNumber(), metaSqlDataType(t)); + } + + public void testMetaDateTypeSub() { + assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATE)); + assertEquals(Integer.valueOf(0), metaSqlDateTimeSub(randomDataTypeNoDate())); + } + + public void testMetaMinimumScale() { + assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATE)); + assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(LONG)); + assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(FLOAT)); + assertNull(metaSqlMinimumScale(KEYWORD)); + } + + public void testMetaMaximumScale() { + assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATE)); + assertEquals(Short.valueOf((short) 0), metaSqlMaximumScale(LONG)); + assertEquals(Short.valueOf((short) FLOAT.defaultPrecision), metaSqlMaximumScale(FLOAT)); + assertNull(metaSqlMaximumScale(KEYWORD)); + } + + public void testMetaRadix() { + assertNull(metaSqlRadix(DATE)); + assertNull(metaSqlRadix(KEYWORD)); + assertEquals(Integer.valueOf(10), metaSqlRadix(LONG)); + assertEquals(Integer.valueOf(2), 
metaSqlRadix(FLOAT)); + } + + private DataType randomDataTypeNoDate() { + return randomValueOtherThan(DataType.DATE, () -> randomFrom(DataType.values())); + } +} + diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index c5e82123d7b8b..891b11ba70bb0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -82,7 +82,7 @@ public void testDateField() { EsField field = mapping.get("date"); assertThat(field.getDataType(), is(DATE)); assertThat(field.hasDocValues(), is(true)); - assertThat(field.getPrecision(), is(19)); + assertThat(field.getPrecision(), is(24)); DateEsField dfield = (DateEsField) field; List formats = dfield.getFormats(); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml index ea545da5f639c..1a587c47fd573 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml @@ -88,7 +88,24 @@ setup: "description": "second", "latest_record_time_stamp": "2016-06-01T00:00:00Z", "latest_result_time_stamp": "2016-06-01T00:00:00Z", - "snapshot_doc_count": 3 + "snapshot_doc_count": 3, + "model_size_stats": { + "job_id" : "delete-model-snapshot", + "result_type" : "model_size_stats", + "model_bytes" : 0, + "total_by_field_count" : 101, + "total_over_field_count" : 0, + "total_partition_field_count" : 0, + "bucket_allocation_failures_count" : 0, + "memory_status" : "ok", + "log_time" : 1495808248662, + "timestamp" : 1495808248662 + }, + "quantiles": { + "job_id": "delete-model-snapshot", + "timestamp": 1495808248662, + "quantile_state": "quantiles-1" + } } - do: @@ -106,12 +123,10 
@@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser - xpack.ml.update_job: + xpack.ml.revert_model_snapshot: job_id: delete-model-snapshot - body: > - { - "model_snapshot_id": "active-snapshot" - } + snapshot_id: "active-snapshot" + --- "Test delete snapshot missing snapshotId": diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml index 6fa66667e2641..554e339687ba4 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml @@ -1,13 +1,17 @@ --- "Test watcher stats output": - + - skip: + version: "all" + reason: "@AwaitsFix: https://github.com/elastic/elasticsearch/issues/30298" - do: {xpack.watcher.stats: {}} - match: { "manually_stopped": false } - match: { "stats.0.watcher_state": "started" } --- "Test watcher stats supports emit_stacktraces parameter": - + - skip: + version: "all" + reason: "@AwaitsFix: https://github.com/elastic/elasticsearch/issues/30298" - do: xpack.watcher.stats: metric: "all" diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index 3fd65a8695d65..298e5029fdf53 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -78,7 +78,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.clientWithOrigin; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_INDEX_NAME; 
import static org.elasticsearch.xpack.core.security.authc.esnative.NativeUserStoreField.INDEX_TYPE; import static org.elasticsearch.xpack.core.security.authc.esnative.NativeUserStoreField.RESERVED_USER_TYPE; @@ -90,6 +89,8 @@ public class Upgrade extends Plugin implements ActionPlugin { // this index setting is set by the upgrade API or automatically when a 6.0 index template is created private static final int EXPECTED_INDEX_FORMAT_VERSION = 6; + private static final String SECURITY_INDEX_NAME = ".security"; + private final Settings settings; private final List> upgradeCheckFactories; diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 9f1fb95ed4835..ef5c3acc3d238 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.upgrade; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.search.SearchResponse; @@ -31,7 +30,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30430") public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { @Before diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java index 7d9e91384e515..3754cc440eb64 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java @@ 
-511,7 +511,7 @@ public interface Field { * @param params The ToXContentParams from the parent write * @param excludeField The field to exclude * @return A bytearrayinputstream that contains the serialized request - * @throws IOException + * @throws IOException if an IOException is triggered in the underlying toXContent method */ public static InputStream filterToXContent(HttpRequest request, XContent xContent, ToXContent.Params params, String excludeField) throws IOException { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEngine.java index f370847aca965..ec63a68d3cae5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/TriggerEngine.java @@ -37,11 +37,6 @@ public interface TriggerEngine { */ void pauseExecution(); - /** - * Returns the number of active jobs currently in this trigger engine implementation - */ - int getJobCount(); - /** * Removes the job associated with the given name from this trigger engine. 
* diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/manual/ManualTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/manual/ManualTriggerEngine.java index 386e28501210e..2d44434206d76 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/manual/ManualTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/manual/ManualTriggerEngine.java @@ -52,11 +52,6 @@ public void add(Watch job) { public void pauseExecution() { } - @Override - public int getJobCount() { - return 0; - } - @Override public boolean remove(String jobId) { return false; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java index de8ab1d1f4bc6..05aa7cf302817 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/trigger/schedule/engine/TickerScheduleTriggerEngine.java @@ -77,11 +77,6 @@ public void add(Watch watch) { schedules.put(watch.id(), new ActiveSchedule(watch.id(), trigger.getSchedule(), clock.millis())); } - @Override - public int getJobCount() { - return schedules.size(); - } - @Override public boolean remove(String jobId) { return schedules.remove(jobId) != null; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java index 57fe40f67b4dd..58f5c8f4a26b0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/trigger/ScheduleTriggerEngineMock.java @@ -71,11 +71,6 @@ public void pauseExecution() { watches.clear(); } - @Override - public int getJobCount() { - return watches.size(); - } - @Override public boolean remove(String jobId) { return watches.remove(jobId) != null; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index fa2f7df21a6a5..c7b8cf8c069b4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -591,11 +591,6 @@ public void add(Watch watch) { public void pauseExecution() { } - @Override - public int getJobCount() { - return 0; - } - @Override public boolean remove(String jobId) { return false; diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index fbef056b34738..820f55cf28fc2 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -146,7 +146,7 @@ subprojects { configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { dependsOn copyTestNodeKeystore if (version.before('6.3.0')) { - plugin xpackProject('plugin').path + mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}" } bwcVersion = version numBwcNodes = 2 diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index a5fc1575f484a..14bdd533c6b38 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -206,6 +206,7 @@ 
public void testMemoryStatus() throws Exception { assertThat(e.getMessage(), equalTo("Cannot run forecast: Forecast cannot be executed as model memory status is not OK")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/30399") public void testMemoryLimit() throws Exception { Detector.Builder detector = new Detector.Builder("mean", "value"); detector.setByFieldName("clientIP"); diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 0cd4ae6d2e20f..1a53654b38233 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -119,7 +119,7 @@ subprojects { configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { dependsOn copyTestNodeKeystore if (version.before('6.3.0')) { - plugin xpackProject('plugin').path + mavenPlugin 'x-pack', "org.elasticsearch.plugin:x-pack:${version}" } String usersCli = version.before('6.3.0') ? 'bin/x-pack/users' : 'bin/elasticsearch-users' setupCommand 'setupTestUser', usersCli, 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index a9e1ccba614ef..65b1a7c85dcb1 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.junit.Before; import java.io.IOException; @@ -67,7 +66,7 @@ protected Settings restClientSettings() { } protected Collection templatesToWaitFor() { - return 
Collections.singletonList(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME); + return Collections.singletonList("security-index-template"); } @Before diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java index 61d006c9a5900..47f1527b6ba52 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java @@ -37,7 +37,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction; import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder; import static org.elasticsearch.xpack.watcher.input.InputBuilders.simpleInput; @@ -97,8 +96,7 @@ public void waitForSecuritySetup() throws Exception { Response response = client().performRequest("GET", "/_cluster/state/metadata"); ObjectPath objectPath = ObjectPath.createFromResponse(response); - final String mappingsPath = "metadata.templates." 
+ SECURITY_TEMPLATE_NAME + "" + - ".mappings"; + final String mappingsPath = "metadata.templates.security-index-template.mappings"; Map mappings = objectPath.evaluate(mappingsPath); assertNotNull(mappings); assertThat(mappings.size(), greaterThanOrEqualTo(1)); diff --git a/x-pack/qa/sql/multinode/src/test/java/org/elasticsearch/xpack/qa/sql/multinode/RestSqlMultinodeIT.java b/x-pack/qa/sql/multinode/src/test/java/org/elasticsearch/xpack/qa/sql/multinode/RestSqlMultinodeIT.java index efd426439e0ab..32dd60cfa2dce 100644 --- a/x-pack/qa/sql/multinode/src/test/java/org/elasticsearch/xpack/qa/sql/multinode/RestSqlMultinodeIT.java +++ b/x-pack/qa/sql/multinode/src/test/java/org/elasticsearch/xpack/qa/sql/multinode/RestSqlMultinodeIT.java @@ -8,6 +8,7 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; @@ -53,7 +54,7 @@ public void testIndexOnWrongNode() throws IOException { String firstHostName = null; String match = firstHost.getHostName() + ":" + firstHost.getPort(); - Map nodesInfo = responseToMap(client().performRequest("GET", "/_nodes")); + Map nodesInfo = responseToMap(client().performRequest(new Request("GET", "/_nodes"))); @SuppressWarnings("unchecked") Map nodes = (Map) nodesInfo.get("nodes"); for (Map.Entry node : nodes.entrySet()) { @@ -74,7 +75,9 @@ public void testIndexOnWrongNode() throws IOException { } index.endObject(); index.endObject(); - client().performRequest("PUT", "/test", emptyMap(), new StringEntity(Strings.toString(index), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/test"); + request.setJsonEntity(Strings.toString(index)); + client().performRequest(request); int documents = between(10, 100); createTestData(documents); @@ -84,6 +87,9 @@ public void testIndexOnWrongNode() throws 
IOException { } private void createTestData(int documents) throws UnsupportedCharsetException, IOException { + Request request = new Request("PUT", "/test/test/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); for (int i = 0; i < documents; i++) { int a = 3 * i; @@ -92,8 +98,9 @@ private void createTestData(int documents) throws UnsupportedCharsetException, I bulk.append("{\"index\":{\"_id\":\"" + i + "\"}\n"); bulk.append("{\"a\": " + a + ", \"b\": " + b + ", \"c\": " + c + "}\n"); } - client().performRequest("PUT", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + + client().performRequest(request); } private Map responseToMap(Response response) throws IOException { @@ -108,14 +115,12 @@ private void assertCount(RestClient client, int count) throws IOException { expected.put("columns", singletonList(columnInfo(mode, "COUNT(1)", "long", JDBCType.BIGINT, 20))); expected.put("rows", singletonList(singletonList(count))); - Map params = new TreeMap<>(); - params.put("format", "json"); // JSON is easier to parse then a table - if (Strings.hasText(mode)) { - params.put("mode", mode); // JDBC or PLAIN mode + Request request = new Request("POST", "/_xpack/sql"); + if (false == mode.isEmpty()) { + request.addParameter("mode", mode); } - - Map actual = responseToMap(client.performRequest("POST", "/_xpack/sql", params, - new StringEntity("{\"query\": \"SELECT COUNT(*) FROM test\"}", ContentType.APPLICATION_JSON))); + request.setJsonEntity("{\"query\": \"SELECT COUNT(*) FROM test\"}"); + Map actual = responseToMap(client.performRequest(request)); if (false == expected.equals(actual)) { NotEqualMessageBuilder message = new NotEqualMessageBuilder(); diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java 
b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java index 6ac1c2c11ea9b..5833ef6dae5a1 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java @@ -10,6 +10,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Nullable; @@ -176,14 +177,15 @@ private static Map runSql(@Nullable String asUser, String mode, } private static Map runSql(@Nullable String asUser, String mode, HttpEntity entity) throws IOException { - Map params = new TreeMap<>(); - params.put("format", "json"); // JSON is easier to parse then a table - if (Strings.hasText(mode)) { - params.put("mode", mode); // JDBC or PLAIN mode + Request request = new Request("POST", "/_xpack/sql"); + if (false == mode.isEmpty()) { + request.addParameter("mode", mode); } - Header[] headers = asUser == null ? 
new Header[0] : new Header[] {new BasicHeader("es-security-runas-user", asUser)}; - Response response = client().performRequest("POST", "/_xpack/sql", params, entity, headers); - return toMap(response); + if (asUser != null) { + request.setHeaders(new BasicHeader("es-security-runas-user", asUser)); + } + request.setEntity(entity); + return toMap(client().performRequest(request)); } private static void assertResponse(Map expected, Map actual) { diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java index 205cd479dde1b..481e7a4f60f19 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java @@ -11,6 +11,7 @@ import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -41,7 +42,6 @@ import java.util.function.Function; import java.util.regex.Pattern; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; @@ -135,6 +135,9 @@ public void oneTimeSetup() throws Exception { * write the test data once. 
*/ return; } + Request request = new Request("PUT", "/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); bulk.append("{\"index\":{\"_index\": \"test\", \"_type\": \"doc\", \"_id\":\"1\"}\n"); bulk.append("{\"a\": 1, \"b\": 2, \"c\": 3}\n"); @@ -142,8 +145,8 @@ public void oneTimeSetup() throws Exception { bulk.append("{\"a\": 4, \"b\": 5, \"c\": 6}\n"); bulk.append("{\"index\":{\"_index\": \"bort\", \"_type\": \"doc\", \"_id\":\"1\"}\n"); bulk.append("{\"a\": \"test\"}\n"); - client().performRequest("PUT", "/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); oneTimeSetup = true; } @@ -173,7 +176,7 @@ public void setInitialAuditLogOffset() { @AfterClass public static void wipeIndicesAfterTests() throws IOException { try { - adminClient().performRequest("DELETE", "*"); + adminClient().performRequest(new Request("DELETE", "*")); } catch (ResponseException e) { // 404 here just means we had no indexes if (e.getResponse().getStatusLine().getStatusCode() != 404) { @@ -472,13 +475,15 @@ public void testNoGetIndex() throws Exception { } protected static void createUser(String name, String role) throws IOException { - XContentBuilder user = JsonXContent.contentBuilder().prettyPrint().startObject(); { + Request request = new Request("PUT", "/_xpack/security/user/" + name); + XContentBuilder user = JsonXContent.contentBuilder().prettyPrint(); + user.startObject(); { user.field("password", "testpass"); user.field("roles", role); } user.endObject(); - client().performRequest("PUT", "/_xpack/security/user/" + name, emptyMap(), - new StringEntity(Strings.toString(user), ContentType.APPLICATION_JSON)); + request.setJsonEntity(Strings.toString(user)); + client().performRequest(request); } protected AuditLogAsserter createAuditLogAsserter() { diff --git 
a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/CliIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/CliIntegrationTestCase.java index 63795edecf855..6adf37ff325e6 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/CliIntegrationTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/CliIntegrationTestCase.java @@ -5,9 +5,9 @@ */ package org.elasticsearch.xpack.qa.sql.cli; -import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -19,7 +19,6 @@ import java.io.IOException; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.qa.sql.rest.RestSqlTestCase.assertNoSearchContexts; public abstract class CliIntegrationTestCase extends ESRestTestCase { @@ -60,11 +59,13 @@ protected SecurityConfig securityConfig() { } protected void index(String index, CheckedConsumer body) throws IOException { + Request request = new Request("PUT", "/" + index + "/doc/1"); + request.addParameter("refresh", "true"); XContentBuilder builder = JsonXContent.contentBuilder().startObject(); body.accept(builder); builder.endObject(); - HttpEntity doc = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); } public String command(String command) throws IOException { diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java index 9a5d5b9c3eaca..f93ae339a820d 100644 --- 
a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ErrorsTestCase.java @@ -8,8 +8,7 @@ import java.io.IOException; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; - -import static java.util.Collections.emptyMap; +import org.elasticsearch.client.Request; import static org.hamcrest.Matchers.startsWith; @@ -41,7 +40,9 @@ public void testSelectFromMissingIndex() throws IOException { @Override public void testSelectFromIndexWithoutTypes() throws Exception { // Create an index without any types - client().performRequest("PUT", "/test", emptyMap(), new StringEntity("{}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/test"); + request.setJsonEntity("{}"); + client().performRequest(request); assertFoundOneProblem(command("SELECT * FROM test")); assertEquals("line 1:15: [test] doesn't have any types so it is incompatible with sql" + END, readLine()); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/FetchSizeTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/FetchSizeTestCase.java index dc34b9c1101c7..542e71ea1841e 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/FetchSizeTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/FetchSizeTestCase.java @@ -7,10 +7,10 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import java.io.IOException; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsString; /** @@ -18,13 +18,16 @@ */ public abstract class FetchSizeTestCase extends CliIntegrationTestCase { public void testSelect() throws IOException { + Request request = new Request("PUT", "/test/doc/_bulk"); + request.addParameter("refresh", "true"); StringBuilder bulk = new 
StringBuilder(); for (int i = 0; i < 20; i++) { bulk.append("{\"index\":{}}\n"); bulk.append("{\"test_field\":" + i + "}\n"); } - client().performRequest("PUT", "/test/doc/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); + assertEquals("[?1l>[?1000l[?2004lfetch size set to [90m4[0m", command("fetch size = 4")); assertEquals("[?1l>[?1000l[?2004lfetch separator set to \"[90m -- fetch sep -- [0m\"", command("fetch separator = \" -- fetch sep -- \"")); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java index 9137e2028aa50..f3fdd8e267ac3 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java @@ -8,6 +8,7 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; @@ -55,6 +56,7 @@ private static void createString(String name, XContentBuilder builder) throws Ex .endObject(); } protected static void loadDatasetIntoEs(RestClient client, String index) throws Exception { + Request request = new Request("PUT", "/" + index); XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); createIndex.startObject("settings"); { @@ -91,11 +93,9 @@ protected static void loadDatasetIntoEs(RestClient client, String index) throws createIndex.endObject(); } createIndex.endObject().endObject(); - - client.performRequest("PUT", "/" + index, emptyMap(), new StringEntity(Strings.toString(createIndex), - ContentType.APPLICATION_JSON)); + 
request.setJsonEntity(Strings.toString(createIndex)); + client.performRequest(request); - Map deps = new LinkedHashMap<>(); csvToLines("departments", (titles, fields) -> deps.put(fields.get(0), fields.get(1))); @@ -119,6 +119,8 @@ protected static void loadDatasetIntoEs(RestClient client, String index) throws list.add(dep); }); + request = new Request("POST", "/" + index + "/emp/_bulk"); + request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); csvToLines("employees", (titles, fields) -> { bulk.append("{\"index\":{}}\n"); @@ -146,17 +148,16 @@ protected static void loadDatasetIntoEs(RestClient client, String index) throws bulk.setLength(bulk.length() - 1); bulk.append("]"); } - + bulk.append("}\n"); }); - - client.performRequest("POST", "/" + index + "/emp/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client.performRequest(request); } protected static void makeAlias(RestClient client, String aliasName, String... 
indices) throws Exception { for (String index : indices) { - client.performRequest("POST", "/" + index + "/_alias/" + aliasName); + client.performRequest(new Request("POST", "/" + index + "/_alias/" + aliasName)); } } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java index 0fffb0dac4c3b..ea6c5f165ee6f 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ErrorsTestCase.java @@ -9,8 +9,7 @@ import java.sql.SQLException; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; - -import static java.util.Collections.emptyMap; +import org.elasticsearch.client.Request; import static org.hamcrest.Matchers.startsWith; @@ -37,7 +36,9 @@ public void testSelectFromMissingIndex() throws SQLException { @Override public void testSelectFromIndexWithoutTypes() throws Exception { // Create an index without any types - client().performRequest("PUT", "/test", emptyMap(), new StringEntity("{}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/test"); + request.setJsonEntity("{}"); + client().performRequest(request); try (Connection c = esJdbc()) { SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery()); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/FetchSizeTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/FetchSizeTestCase.java index de7cf465acacf..4d2487a0c03ff 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/FetchSizeTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/FetchSizeTestCase.java @@ -7,6 +7,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import 
org.elasticsearch.client.Request; import org.junit.Before; import java.io.IOException; @@ -15,7 +16,6 @@ import java.sql.SQLException; import java.sql.Statement; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xpack.qa.sql.rest.RestSqlTestCase.assertNoSearchContexts; /** @@ -25,13 +25,15 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase { @Before public void createTestIndex() throws IOException { + Request request = new Request("PUT", "/test/doc/_bulk"); + request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); for (int i = 0; i < 20; i++) { bulk.append("{\"index\":{}}\n"); bulk.append("{\"test_field\":" + i + "}\n"); } - client().performRequest("PUT", "/test/doc/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); } /** diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java index aa5dc5c0ac2b6..fc0cd67efac14 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java @@ -9,6 +9,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -85,16 +86,18 @@ protected Connection useDataSource() throws SQLException { } public static void index(String index, CheckedConsumer body) throws IOException { + Request request = new Request("PUT", "/" + index + "/doc/1"); + request.addParameter("refresh", "true"); 
XContentBuilder builder = JsonXContent.contentBuilder().startObject(); body.accept(builder); builder.endObject(); - HttpEntity doc = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); } protected String clusterName() { try { - String response = EntityUtils.toString(client().performRequest("GET", "/").getEntity()); + String response = EntityUtils.toString(client().performRequest(new Request("GET", "/")).getEntity()); return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false).get("cluster_name").toString(); } catch (IOException e) { throw new RuntimeException(e); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java index 5a589f94d28d4..d8ba1ade959ae 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.qa.sql.jdbc; import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; @@ -49,7 +50,7 @@ public SpecBaseIntegrationTestCase(String fileName, String groupName, String tes @Before public void setupTestDataIfNeeded() throws Exception { - if (client().performRequest("HEAD", "/test_emp").getStatusLine().getStatusCode() == 404) { + if (client().performRequest(new Request("HEAD", "/test_emp")).getStatusLine().getStatusCode() == 404) { DataLoader.loadDatasetIntoEs(client()); } } @@ -62,7 +63,7 @@ protected boolean 
preserveIndicesUponCompletion() { @AfterClass public static void wipeTestData() throws IOException { try { - adminClient().performRequest("DELETE", "/*"); + adminClient().performRequest(new Request("DELETE", "/*")); } catch (ResponseException e) { // 404 here just means we had no indexes if (e.getResponse().getStatusLine().getStatusCode() != 404) { diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java index e970fcaa88a89..cf2647dcbfea1 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java @@ -12,6 +12,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.CheckedSupplier; @@ -74,16 +75,19 @@ public void testBasicQuery() throws IOException { } public void testNextPage() throws IOException { + Request request = new Request("POST", "/test/test/_bulk"); + request.addParameter("refresh", "true"); String mode = randomMode(); StringBuilder bulk = new StringBuilder(); for (int i = 0; i < 20; i++) { bulk.append("{\"index\":{\"_id\":\"" + i + "\"}}\n"); bulk.append("{\"text\":\"text" + i + "\", \"number\":" + i + "}\n"); } - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); - String request = "{\"query\":\"" + String sqlRequest = + "{\"query\":\"" + " SELECT text, number, SQRT(number) AS s, SCORE()" + " FROM test" + " ORDER BY number, SCORE()\", " @@ -94,7 +98,7 @@ public void testNextPage() 
throws IOException { for (int i = 0; i < 20; i += 2) { Map response; if (i == 0) { - response = runSql(mode, new StringEntity(request, ContentType.APPLICATION_JSON)); + response = runSql(mode, new StringEntity(sqlRequest, ContentType.APPLICATION_JSON)); } else { response = runSql(mode, new StringEntity("{\"cursor\":\"" + cursor + "\"}", ContentType.APPLICATION_JSON)); @@ -138,12 +142,14 @@ public void testTimeZone() throws IOException { } public void testScoreWithFieldNamedScore() throws IOException { + Request request = new Request("POST", "/test/test/_bulk"); + request.addParameter("refresh", "true"); String mode = randomMode(); StringBuilder bulk = new StringBuilder(); bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); bulk.append("{\"name\":\"test\", \"score\":10}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); Map expected = new HashMap<>(); expected.put("columns", Arrays.asList( @@ -209,7 +215,9 @@ public void testSelectFromMissingIndex() { @Override public void testSelectFromIndexWithoutTypes() throws Exception { // Create an index without any types - client().performRequest("PUT", "/test", emptyMap(), new StringEntity("{}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/test"); + request.setJsonEntity("{}"); + client().performRequest(request); String mode = randomFrom("jdbc", "plain"); expectBadRequest(() -> runSql(mode, "SELECT * FROM test"), containsString("1:15: [test] doesn't have any types so it is incompatible with sql")); @@ -229,24 +237,9 @@ public void testSelectMissingFunction() throws Exception { containsString("1:8: Unknown function [missing]")); } - private void index(String... 
docs) throws IOException { - StringBuilder bulk = new StringBuilder(); - for (String doc : docs) { - bulk.append("{\"index\":{}\n"); - bulk.append(doc + "\n"); - } - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - } - @Override public void testSelectProjectScoreInAggContext() throws Exception { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"foo\":1}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - + index("{\"foo\":1}"); expectBadRequest(() -> runSql(randomMode(), " SELECT foo, SCORE(), COUNT(*)" + " FROM test" @@ -256,12 +249,7 @@ public void testSelectProjectScoreInAggContext() throws Exception { @Override public void testSelectOrderByScoreInAggContext() throws Exception { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"foo\":1}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - + index("{\"foo\":1}"); expectBadRequest(() -> runSql(randomMode(), " SELECT foo, COUNT(*)" + " FROM test" @@ -272,36 +260,21 @@ public void testSelectOrderByScoreInAggContext() throws Exception { @Override public void testSelectGroupByScore() throws Exception { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"foo\":1}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - + index("{\"foo\":1}"); expectBadRequest(() -> runSql(randomMode(), "SELECT COUNT(*) FROM test GROUP BY SCORE()"), containsString("Cannot use [SCORE()] for grouping")); } @Override public void 
testSelectScoreSubField() throws Exception { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"foo\":1}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - + index("{\"foo\":1}"); expectBadRequest(() -> runSql(randomMode(), "SELECT SCORE().bar FROM test"), containsString("line 1:15: extraneous input '.' expecting {, ','")); } @Override public void testSelectScoreInScalar() throws Exception { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"foo\":1}\n"); - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); - + index("{\"foo\":1}"); expectBadRequest(() -> runSql(randomMode(), "SELECT SIN(SCORE()) FROM test"), containsString("line 1:12: [SCORE()] cannot be an argument to a function")); } @@ -340,37 +313,32 @@ private Map runSql(String mode, HttpEntity sql) throws IOExcepti } private Map runSql(String mode, HttpEntity sql, String suffix) throws IOException { - Map params = new TreeMap<>(); - params.put("error_trace", "true"); // Helps with debugging in case something crazy happens on the server. - params.put("pretty", "true"); // Improves error reporting readability + Request request = new Request("POST", "/_xpack/sql" + suffix); + request.addParameter("error_trace", "true"); // Helps with debugging in case something crazy happens on the server. 
+ request.addParameter("pretty", "true"); // Improves error reporting readability if (randomBoolean()) { // We default to JSON but we force it randomly for extra coverage - params.put("format", "json"); + request.addParameter("format", "json"); } - if (Strings.hasText(mode)) { - params.put("mode", mode); // JDBC or PLAIN mode + if (false == mode.isEmpty()) { + request.addParameter("mode", mode); // JDBC or PLAIN mode } - Header[] headers = randomFrom( + request.setHeaders(randomFrom( new Header[] {}, new Header[] {new BasicHeader("Accept", "*/*")}, - new Header[] {new BasicHeader("Accpet", "application/json")}); - Response response = client().performRequest("POST", "/_xpack/sql" + suffix, params, sql); + new Header[] {new BasicHeader("Accpet", "application/json")})); + request.setEntity(sql); + Response response = client().performRequest(request); try (InputStream content = response.getEntity().getContent()) { return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); } } public void testBasicTranslateQuery() throws IOException { - StringBuilder bulk = new StringBuilder(); - bulk.append("{\"index\":{\"_id\":\"1\"}}\n"); - bulk.append("{\"test\":\"test\"}\n"); - bulk.append("{\"index\":{\"_id\":\"2\"}}\n"); - bulk.append("{\"test\":\"test\"}\n"); - client().performRequest("POST", "/test_translate/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + index("{\"test\":\"test\"}", "{\"test\":\"test\"}"); - Map response = runSql(randomMode(), "SELECT * FROM test_translate", "/translate/"); - assertEquals(response.get("size"), 1000); + Map response = runSql(randomMode(), "SELECT * FROM test", "/translate/"); + assertEquals(1000, response.get("size")); @SuppressWarnings("unchecked") Map source = (Map) response.get("_source"); assertNotNull(source); @@ -459,13 +427,12 @@ public void testBasicQueryText() throws IOException { } public void testNextPageText() throws IOException { - StringBuilder 
bulk = new StringBuilder(); - for (int i = 0; i < 20; i++) { - bulk.append("{\"index\":{\"_id\":\"" + i + "\"}}\n"); - bulk.append("{\"text\":\"text" + i + "\", \"number\":" + i + "}\n"); + int size = 20; + String[] docs = new String[size]; + for (int i = 0; i < size; i++) { + docs[i] = "{\"text\":\"text" + i + "\", \"number\":" + i + "}\n"; } - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + index(docs); String request = "{\"query\":\"SELECT text, number, number + 5 AS sum FROM test ORDER BY number\", \"fetch_size\":2}"; @@ -563,23 +530,33 @@ private Tuple runSqlAsText(String sql, String accept) throws IOE return runSqlAsText("", new StringEntity("{\"query\":\"" + sql + "\"}", ContentType.APPLICATION_JSON), accept); } + /** + * Run SQL as text using the {@code Accept} header to specify the format + * rather than the {@code format} parameter. + */ private Tuple runSqlAsText(String suffix, HttpEntity entity, String accept) throws IOException { - Response response = client().performRequest("POST", "/_xpack/sql" + suffix, singletonMap("error_trace", "true"), - entity, new BasicHeader("Accept", accept)); + Request request = new Request("POST", "/_xpack/sql" + suffix); + request.addParameter("error_trace", "true"); + request.setEntity(entity); + request.setHeaders(new BasicHeader("Accept", accept)); + Response response = client().performRequest(request); return new Tuple<>( Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)), response.getHeader("Cursor") ); } + /** + * Run SQL as text using the {@code format} parameter to specify the format + * rather than an {@code Accept} header. 
+ */ private Tuple runSqlAsTextFormat(String sql, String format) throws IOException { - StringEntity entity = new StringEntity("{\"query\":\"" + sql + "\"}", ContentType.APPLICATION_JSON); - - Map params = new HashMap<>(); - params.put("error_trace", "true"); - params.put("format", format); + Request request = new Request("POST", "/_xpack/sql"); + request.addParameter("error_trace", "true"); + request.addParameter("format", format); + request.setJsonEntity("{\"query\":\"" + sql + "\"}"); - Response response = client().performRequest("POST", "/_xpack/sql", params, entity); + Response response = client().performRequest(request); return new Tuple<>( Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8)), response.getHeader("Cursor") @@ -595,23 +572,14 @@ private void assertResponse(Map expected, Map ac } public static int getNumberOfSearchContexts(String index) throws IOException { - Response response = client().performRequest("GET", "/_stats/search"); - Map stats; - try (InputStream content = response.getEntity().getContent()) { - stats = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); - } - return getOpenContexts(stats, index); + return getOpenContexts(searchStats(), index); } public static void assertNoSearchContexts() throws IOException { - Response response = client().performRequest("GET", "/_stats/search"); - Map stats; - try (InputStream content = response.getEntity().getContent()) { - stats = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); - } + Map stats = searchStats(); @SuppressWarnings("unchecked") - Map indexStats = (Map) stats.get("indices"); - for (String index : indexStats.keySet()) { + Map indicesStats = (Map) stats.get("indices"); + for (String index : indicesStats.keySet()) { if (index.startsWith(".") == false) { // We are not interested in internal indices assertEquals(index + " should have no search contexts", 0, getOpenContexts(stats, index)); } @@ 
-619,12 +587,34 @@ public static void assertNoSearchContexts() throws IOException { } @SuppressWarnings("unchecked") - public static int getOpenContexts(Map indexStats, String index) { - return (int) ((Map) ((Map) ((Map) ((Map) - indexStats.get("indices")).get(index)).get("total")).get("search")).get("open_contexts"); + private static int getOpenContexts(Map stats, String index) { + stats = (Map) stats.get("indices"); + stats = (Map) stats.get(index); + stats = (Map) stats.get("total"); + stats = (Map) stats.get("search"); + return (Integer) stats.get("open_contexts"); + } + + private static Map searchStats() throws IOException { + Response response = client().performRequest(new Request("GET", "/_stats/search")); + try (InputStream content = response.getEntity().getContent()) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + } } public static String randomMode() { return randomFrom("", "jdbc", "plain"); } + + private void index(String... docs) throws IOException { + Request request = new Request("POST", "/test/test/_bulk"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); + for (String doc : docs) { + bulk.append("{\"index\":{}}\n"); + bulk.append(doc + "\n"); + } + request.setJsonEntity(bulk.toString()); + client().performRequest(request); + } } diff --git a/x-pack/qa/sql/src/main/resources/setup_mock_metadata_get_columns.sql b/x-pack/qa/sql/src/main/resources/setup_mock_metadata_get_columns.sql index 3d8cf4708945e..69c572f4ddd4e 100644 --- a/x-pack/qa/sql/src/main/resources/setup_mock_metadata_get_columns.sql +++ b/x-pack/qa/sql/src/main/resources/setup_mock_metadata_get_columns.sql @@ -25,26 +25,26 @@ CREATE TABLE mock ( ) AS SELECT null, 'test1', 'name', 12, 'TEXT', 0, 2147483647, null, null, 1, -- columnNullable - null, null, 12, null, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO' + null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL UNION ALL
SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, 2147483647, null, null, 1, -- columnNullable - null, null, 12, null, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO' + null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL UNION ALL -SELECT null, 'test2', 'date', 93, 'DATE', 20, 8, null, null, +SELECT null, 'test2', 'date', 93, 'DATE', 24, 8, null, null, 1, -- columnNullable - null, null, 93, null, null, 1, 'YES', null, null, null, null, 'NO', 'NO' + null, null, 9, 3, null, 1, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL UNION ALL SELECT null, 'test2', 'float', 7, 'FLOAT', 15, 4, null, 2, 1, -- columnNullable - null, null, 7, null, null, 2, 'YES', null, null, null, null, 'NO', 'NO' + null, null, 7, 0, null, 2, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL UNION ALL SELECT null, 'test2', 'number', -5, 'LONG', 20, 8, null, 10, 1, -- columnNullable - null, null, -5, null, null, 3, 'YES', null, null, null, null, 'NO', 'NO' + null, null, -5, 0, null, 3, 'YES', null, null, null, null, 'NO', 'NO' FROM DUAL ; diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 1d73d1f0d2979..11da59e44d6fe 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -22,13 +22,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.client.SecurityClient; +import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -198,7 +198,7 @@ public void cleanupSecurityIndex() throws Exception { @Override public Set excludeTemplates() { Set templates = Sets.newHashSet(super.excludeTemplates()); - templates.add(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME); // don't remove the security index template + templates.add(SecurityIndexManager.SECURITY_TEMPLATE_NAME); // don't remove the security index template return templates; } diff --git a/x-pack/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java b/x-pack/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java index 5b60296ee6d6c..d14e76f223ffe 100644 --- a/x-pack/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java +++ b/x-pack/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/xpack/security/SecurityTribeTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.tribe.TribePlugin; import org.elasticsearch.tribe.TribeService; -import org.elasticsearch.xpack.core.security.SecurityLifecycleServiceField; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; import 
org.elasticsearch.xpack.core.security.action.user.PutUserResponse; @@ -60,6 +59,7 @@ import java.util.function.Predicate; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; @@ -153,7 +153,7 @@ public static void tearDownSecondCluster() { public void tearDownTribeNodeAndWipeCluster() throws Exception { if (cluster2 != null) { try { - cluster2.wipe(Collections.singleton(SecurityLifecycleServiceField.SECURITY_TEMPLATE_NAME)); + cluster2.wipe(Collections.singleton(SECURITY_TEMPLATE_NAME)); try { // this is a hack to clean up the .security index since only the XPackSecurity user or superusers can delete it final Client cluster2Client = cluster2.client().filterWithHeader(Collections.singletonMap("Authorization",