diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index e0d1dd983c0de..3859dfa1ddbb9 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ +import org.elasticsearch.gradle.OS + apply plugin: 'elasticsearch.build' base { @@ -38,6 +40,12 @@ tasks.named("dependencyLicenses").configure { tasks.named("test").configure { // TODO: find a way to add permissions for the tests in this module systemProperty 'tests.security.manager', 'false' + // These tests are "heavy" on the secure number generator. On Linux, the NativePRNG defaults to /dev/random for the seeds, and + // its entropy is quite limited, to the point that it's known to hang: https://bugs.openjdk.org/browse/JDK-6521844 + // We force the seed to be initialized from /dev/urandom, which is less secure, but in case of unit tests is not important. + if (OS.current() == OS.LINUX) { + systemProperty 'java.security.egd', 'file:/dev/urandom' + } } /* diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index f7882a3fce743..c088e89338e74 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -118,7 +118,6 @@ import static org.mockito.Mockito.spy; @LuceneTestCase.SuppressFileSystems("*") -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102783") public class InstallPluginActionTests extends ESTestCase { private InstallPluginAction skipJarHellAction; diff --git a/docs/changelog/102934.yaml b/docs/changelog/102934.yaml new file mode 100644 index 0000000000000..4f61427506cf3 --- /dev/null +++ 
b/docs/changelog/102934.yaml @@ -0,0 +1,6 @@ +pr: 102934 +summary: Ensure transform updates only modify the expected transform task +area: Transform +type: bug +issues: + - 102933 diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index e433ce0b60596..ae1adf4160c2a 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -305,7 +305,6 @@ public void testHasParentFilter() throws Exception { constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false)) ).setSize(numChildDocsPerParent), response -> { - assertNoFailures(response); Set childIds = parentToChildrenEntry.getValue(); assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); for (int i = 0; i < response.getHits().getTotalHits().value; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java index f2e0511ffb7ab..7eaed125156e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; public class LookupRuntimeFieldIT extends ESIntegTestCase { @@ -132,90 +134,92 @@ public void populateIndex() throws Exception { } public void 
testBasic() { - SearchResponse searchResponse = prepareSearch("books").addFetchField("author") - .addFetchField("title") - .addSort("published_date", SortOrder.DESC) - .setSize(3) - .get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - ElasticsearchAssertions.assertHitCount(searchResponse, 5); + assertNoFailuresAndResponse( + prepareSearch("books").addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.DESC).setSize(3), + searchResponse -> { + ElasticsearchAssertions.assertHitCount(searchResponse, 5); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); - SearchHit hit2 = searchResponse.getHits().getHits()[2]; - assertThat(hit2.field("title").getValues(), equalTo(List.of("the third 
book"))); - assertThat( - hit2.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + SearchHit hit2 = searchResponse.getHits().getHits()[2]; + assertThat(hit2.field("title").getValues(), equalTo(List.of("the third book"))); + assertThat( + hit2.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); + } ); } public void testLookupMultipleIndices() throws IOException { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" - { - "publisher": { - "type": "lookup", - "target_index": "publishers", - "input_field": "publisher_id", - "target_field": "_id", - "fetch_fields": ["name", "city"] + assertResponse( + prepareSearch("books").setRuntimeMappings(parseMapping(""" + { + "publisher": { + "type": "lookup", + "target_index": "publishers", + "input_field": "publisher_id", + "target_field": "_id", + "fetch_fields": ["name", "city"] + } } - } - """)) - .setFetchSource(false) - .addFetchField("title") - .addFetchField("author") - .addFetchField("publisher") - .addSort("published_date", SortOrder.DESC) - .setSize(2) - .get(); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); - assertThat( - hit0.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) - ); + """)) + .setFetchSource(false) + .addFetchField("title") + .addFetchField("author") + .addFetchField("publisher") + .addSort("published_date", SortOrder.DESC) + .setSize(2), + searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + 
hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); + assertThat( + hit0.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); - assertThat( - hit1.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); + assertThat( + hit1.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + ); + } ); } public void testFetchField() throws Exception { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" + assertNoFailuresAndResponse(prepareSearch("books").setRuntimeMappings(parseMapping(""" { "author": { "type": "lookup", @@ -225,12 +229,15 @@ public void testFetchField() throws Exception { "fetch_fields": ["first_name", {"field": "joined", "format": "MM/yyyy"}] } } - """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1).get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - SearchHit hit0 = 
searchResponse.getHits().getHits()[0]; - // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" - assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); - assertThat(hit0.field("author").getValues(), equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020"))))); + """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1), searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" + assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020")))) + ); + }); } private Map parseMapping(String mapping) throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index d3e312e173c29..21bbd32e6bf26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -49,7 +49,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.empty; @@ -83,9 +83,10 
@@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertThat(resp1.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp1, numDocs); + assertResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { + assertThat(resp1.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp1, numDocs); + }); int deletedDocs = 0; for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { @@ -96,18 +97,20 @@ public void testBasic() { } refresh("test"); if (randomBoolean()) { - SearchResponse resp2 = prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); - assertNoFailures(resp2); - assertHitCount(resp2, numDocs - deletedDocs); + final int delDocCount = deletedDocs; + assertNoFailuresAndResponse( + prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()), + resp2 -> assertHitCount(resp2, numDocs - delDocCount) + ); } try { - SearchResponse resp3 = prepareSearch().setPreference(null) - .setQuery(new MatchAllQueryBuilder()) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertNoFailures(resp3); - assertHitCount(resp3, numDocs); - assertThat(resp3.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), + resp3 -> { + assertHitCount(resp3, numDocs); + assertThat(resp3.pointInTimeId(), equalTo(pitId)); + } + ); } finally { closePointInTime(pitId); } @@ -127,27 +130,24 @@ public void testMultipleIndices() { refresh(); String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - 
assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), equalTo(pitId)); int moreDocs = randomIntBetween(10, 50); - for (int i = 0; i < moreDocs; i++) { - String id = "more-" + i; - String index = "index-" + randomIntBetween(1, numIndices); - prepareIndex(index).setId(id).setSource("value", i).get(); - } - refresh(); - resp = prepareSearch().get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs + moreDocs); - - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + for (int i = 0; i < moreDocs; i++) { + String id = "more-" + i; + String index = "index-" + randomIntBetween(1, numIndices); + prepareIndex(index).setId(id).setSource("value", i).get(); + } + refresh(); + }); + assertNoFailuresAndResponse(prepareSearch(), resp -> assertHitCount(resp, numDocs + moreDocs)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -187,8 +187,7 @@ public void testIndexFilter() { String[] actualIndices = searchContextId.getActualIndices(); assertEquals(1, actualIndices.length); assertEquals("index-3", actualIndices[0]); - assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), resp -> { - assertNoFailures(resp); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), 
resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -213,10 +212,10 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); final Set dataNodes = clusterService().state() .nodes() .getDataNodes() @@ -233,10 +232,10 @@ public void testRelocation() throws Exception { } refresh(); } - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); assertBusy(() -> { final Set assignedNodes = clusterService().state() .routingTable() @@ -246,10 +245,10 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), 
equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -264,17 +263,21 @@ public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp1); - assertHitCount(resp1, index1); - if (rarely()) { - assertBusy(() -> { - final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); - assertThat(stats.search.getOpenContexts(), equalTo(0L)); - }, 60, TimeUnit.SECONDS); - } else { - closePointInTime(resp1.pointInTimeId()); - } + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { + assertHitCount(resp1, index1); + if (rarely()) { + try { + assertBusy(() -> { + final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); + assertThat(stats.search.getOpenContexts(), equalTo(0L)); + }, 60, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + } else { + closePointInTime(resp1.pointInTimeId()); + } + }); SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() @@ -302,20 +305,23 @@ public void testIndexNotFound() { refresh(); String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp); - assertHitCount(resp, index1 + index2); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), + resp -> assertHitCount(resp, index1 + index2) + ); indicesAdmin().prepareDelete("index-1").get(); if (randomBoolean()) { - resp = prepareSearch("index-*").get(); - 
assertNoFailures(resp); - assertHitCount(resp, index2); + assertNoFailuresAndResponse(prepareSearch("index-*"), resp -> assertHitCount(resp, index2)); } // Allow partial search result - resp = prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertFailures(resp); - assertHitCount(resp, index2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), + resp -> { + assertFailures(resp); + assertHitCount(resp, index2); + } + ); // Do not allow partial search result expectThrows( @@ -356,14 +362,15 @@ public void testCanMatch() throws Exception { } } prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); - SearchResponse resp = prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) - .setPreFilterShardSize(randomIntBetween(2, 3)) - .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertThat(resp.getHits().getHits(), arrayWithSize(0)); + assertResponse( + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setPreference(null) + .setPreFilterShardSize(randomIntBetween(2, 3)) + .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) + .setPointInTime(new PointInTimeBuilder(pitId)), + resp -> assertThat(resp.getHits().getHits(), arrayWithSize(0)) + ); for (String node : internalCluster().nodesInclude("test")) { for (IndexService indexService : internalCluster().getInstance(IndicesService.class, node)) { for (IndexShard indexShard : indexService) { @@ -415,19 +422,20 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { 
- SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs1 + numDocs2); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs1 + numDocs2); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); internalCluster().restartNode(assignedNodeForIndex1); - resp = prepareSearch().setPreference(null) - .setAllowPartialSearchResults(true) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertFailures(resp); - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, numDocs2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), + resp -> { + assertFailures(resp); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, numDocs2); + } + ); } finally { closePointInTime(pitId); } @@ -547,40 +555,45 @@ private void assertPagination(PointInTimeBuilder pit, int expectedNumDocs, int s reverseMuls[i] = expectedSorts.get(i).order() == SortOrder.ASC ? 
1 : -1; } SearchResponse response = client().search(searchRequest).get(); - Object[] lastSortValues = null; - while (response.getHits().getHits().length > 0) { - Object[] lastHitSortValues = null; - for (SearchHit hit : response.getHits().getHits()) { - assertTrue(seen.add(hit.getIndex() + hit.getId())); - - if (lastHitSortValues != null) { + try { + Object[] lastSortValues = null; + while (response.getHits().getHits().length > 0) { + Object[] lastHitSortValues = null; + for (SearchHit hit : response.getHits().getHits()) { + assertTrue(seen.add(hit.getIndex() + hit.getId())); + + if (lastHitSortValues != null) { + for (int i = 0; i < expectedSorts.size(); i++) { + Comparable value = (Comparable) hit.getRawSortValues()[i]; + int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + if (cmp != 0) { + assertThat(cmp, equalTo(1)); + break; + } + } + } + lastHitSortValues = hit.getRawSortValues(); + } + int len = response.getHits().getHits().length; + SearchHit last = response.getHits().getHits()[len - 1]; + if (lastSortValues != null) { for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) hit.getRawSortValues()[i]; - int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + Comparable value = (Comparable) last.getSortValues()[i]; + int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; if (cmp != 0) { assertThat(cmp, equalTo(1)); break; } } } - lastHitSortValues = hit.getRawSortValues(); - } - int len = response.getHits().getHits().length; - SearchHit last = response.getHits().getHits()[len - 1]; - if (lastSortValues != null) { - for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) last.getSortValues()[i]; - int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; - if (cmp != 0) { - assertThat(cmp, equalTo(1)); - break; - } - } + assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); + lastSortValues = last.getSortValues(); + 
searchRequest.source().searchAfter(last.getSortValues()); + response.decRef(); + response = client().search(searchRequest).get(); } - assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); - lastSortValues = last.getSortValues(); - searchRequest.source().searchAfter(last.getSortValues()); - response = client().search(searchRequest).get(); + } finally { + response.decRef(); } assertThat(seen.size(), equalTo(expectedNumDocs)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java index 8b1acf11a7a5d..7da015052fe82 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java @@ -24,7 +24,9 @@ import java.util.Collection; import java.util.Queue; +import java.util.concurrent.ExecutionException; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -105,7 +107,7 @@ public void testBasic() { } } - public void testRandom() { + public void testRandom() throws ExecutionException, InterruptedException { int numIndices = randomIntBetween(1, 10); for (int i = 0; i < numIndices; i++) { String index = "index-" + i; @@ -127,21 +129,22 @@ public void testRandom() { RangeQueryBuilder rangeQuery = new RangeQueryBuilder("value").from(from).to(to).includeUpper(true).includeLower(true); SearchRequest searchRequest = new SearchRequest().indices("index-*").source(new SearchSourceBuilder().query(rangeQuery)); searchRequest.setPreFilterShardSize(1); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - var searchShardsRequest = new SearchShardsRequest( - new String[] { "index-*" }, - 
SearchRequest.DEFAULT_INDICES_OPTIONS, - rangeQuery, - null, - preference, - randomBoolean(), - randomBoolean() ? null : randomAlphaOfLength(10) - ); - var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + assertResponse(client().search(searchRequest), searchResponse -> { + var searchShardsRequest = new SearchShardsRequest( + new String[] { "index-*" }, + SearchRequest.DEFAULT_INDICES_OPTIONS, + rangeQuery, + null, + preference, + randomBoolean(), + randomBoolean() ? null : randomAlphaOfLength(10) + ); + var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); - assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); - long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); - assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); + long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); + assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 31ffe560be010..5bb21dc874747 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -74,6 +74,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -121,7 +122,7 @@ protected Collection> nodePlugins() { return Collections.singletonList(TestPlugin.class); } - public void testLocalClusterAlias() { + public void testLocalClusterAlias() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); IndexRequest indexRequest = new IndexRequest("test"); indexRequest.id("1"); @@ -140,14 +141,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("local", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("local", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -158,14 +160,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = 
searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } } @@ -193,8 +196,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce { SearchRequest searchRequest = new SearchRequest(""); searchRequest.indicesOptions(IndicesOptions.fromOptions(true, true, true, true)); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(0, searchResponse.getTotalShards()); + assertResponse(client().search(searchRequest), searchResponse -> assertEquals(0, searchResponse.getTotalShards())); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -217,9 +219,10 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce randomBoolean() ); searchRequest.indices(""); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -236,13 +239,14 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce rangeQuery.lt("1982-01-01"); sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } } - public void testFinalReduce() { + public void testFinalReduce() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); TaskId taskId = new TaskId("node", randomNonNegativeLong()); { @@ -274,11 +278,12 @@ public void testFinalReduce() { SearchRequest searchRequest = randomBoolean() ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(1, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(1, longTerms.getBuckets().size()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -289,11 +294,12 @@ public void testFinalReduce() { nowInMillis, false ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(2, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(2, longTerms.getBuckets().size()); + }); } } @@ -309,7 +315,7 
@@ public void testWaitForRefreshIndexValidation() throws Exception { Arrays.fill(validCheckpoints, SequenceNumbers.UNASSIGNED_SEQ_NO); // no exception - prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get(); + prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get().decRef(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -373,7 +379,7 @@ public void testShardCountLimit() throws Exception { assertAcked(prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numPrimaries2))); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1)); @@ -386,7 +392,7 @@ public void testShardCountLimit() throws Exception { updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1)); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1", "test2").get()); assertThat( @@ -422,12 +428,13 @@ public void testSearchIdle() throws Exception { prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); - assertBusy(() -> { - SearchResponse resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setPreFilterShardSize(randomIntBetween(1, 3)) - .get(); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); - }); + assertBusy( + () -> assertResponse( + prepareSearch("test").setQuery(new 
RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setPreFilterShardSize(randomIntBetween(1, 3)), + resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + ) + ); } public void testCircuitBreakerReduceFail() throws Exception { @@ -471,7 +478,7 @@ public void onFailure(Exception e) { assertBusy(() -> { Exception exc = expectThrows( Exception.class, - () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get() + () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get().decRef() ); assertThat(exc.getCause().getMessage(), containsString("")); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 837c55e81b471..1887e37cbbf47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -20,7 +20,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class IndexingMasterFailoverIT extends ESIntegTestCase { @@ -97,7 +97,7 @@ public void run() { ensureGreen("myindex"); refresh(); - assertThat(prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L)); + assertHitCount(prepareSearch("myindex"), 10); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 
746ddc56870ae..09c14df3566af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -105,10 +105,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we get the data back"); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(100L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } String masterNode = internalCluster().getMasterName(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java index 04fba1f46074f..33719df372fb1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java @@ -31,6 +31,7 @@ import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @@ -51,7 +52,7 @@ public void testDecommissionNodeNoReplicas() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { @@ -79,7 +80,7 @@ public void testDecommissionNodeNoReplicas() { } 
indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } public void testAutoExpandReplicasToFilteredNodes() { @@ -132,7 +133,7 @@ public void testDisablingAllocationFiltering() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java index 5ea78a6b1e3a0..e8234fb09512b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -23,7 +23,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class ClusterDisruptionCleanSettingsIT extends ESIntegTestCase { @@ -63,6 +63,6 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep IndicesStoreIntegrationIT.relocateAndBlockCompletion(logger, "test", 0, node_1, node_2); // now search for the documents and see if we get a reply - 
assertThat(prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0), 100); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index e1ab2bdc2369e..1a8f928d9c10f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -48,7 +48,14 @@ public class SearchIdleIT extends ESSingleNodeTestCase { public void testAutomaticRefreshSearch() throws InterruptedException { - runTestAutomaticRefresh(numDocs -> client().prepareSearch("test").get().getHits().getTotalHits().value); + runTestAutomaticRefresh(numDocs -> { + var resp = client().prepareSearch("test").get(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } + }); } public void testAutomaticRefreshGet() throws InterruptedException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index ce3fd98476725..658b9eadd772f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -408,7 +408,7 @@ public void testAllMissingStrict() throws Exception { expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2", "test3").setQuery(matchAllQuery()).get()); // you should still be able to run empty searches without things blowing up - prepareSearch().setQuery(matchAllQuery()).get(); + prepareSearch().setQuery(matchAllQuery()).get().decRef(); } // For now don't handle closed indices @@ -681,7 +681,7 @@ private static void verify(ActionRequestBuilder 
requestBuilder, boolean fa }); } else { try { - requestBuilder.get(); + requestBuilder.get().decRef(); fail("IndexNotFoundException or IndexClosedException was expected"); } catch (IndexNotFoundException | IndexClosedException e) {} } @@ -694,7 +694,7 @@ private static void verify(ActionRequestBuilder requestBuilder, boolean fa assertThat(response.getResponses()[0].getResponse(), notNullValue()); }); } else { - requestBuilder.get(); + requestBuilder.get().decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index f9f17d8e1ebbf..dd29823f8076f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -51,7 +51,7 @@ public void testNoopRequestBreaker() throws Exception { indexRandom(true, reqs); // A cardinality aggregation uses BigArrays and thus the REQUEST breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get().decRef(); // no exception because the breaker is a noop } @@ -68,7 +68,7 @@ public void testNoopFielddataBreaker() throws Exception { indexRandom(true, reqs); // Sorting using fielddata and thus the FIELDDATA breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // no exception because the breaker is a noop } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index e726c8a08002a..705fb879e9125 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -193,7 +193,7 @@ public void testRamAccountingTermsEnum() throws Exception { indexRandom(true, false, true, reqs); // execute a search that loads field data (sorting on the "test" field) - client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // clear field data cache (thus setting the loaded field data back to 0) clearFieldData(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 6a52159c71ab9..2935efb4808a7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -159,7 +159,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc boolean success = false; try { // Sort by the string and numeric fields, to load them into field data - searchRequestBuilder.get(); + searchRequestBuilder.get().decRef(); success = true; } catch (SearchPhaseExecutionException ex) { logger.info("expected SearchPhaseException: [{}]", ex.getMessage()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 
762bbdda77df1..2cbc3477cb49d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -933,7 +933,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, indexRandom(true, docs); flush(); - assertThat(prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs)); + assertHitCount(prepareSearch(name).setSize(0), numDocs); return indicesAdmin().prepareStats(name).get(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index 77d38410d1ea9..b66a0b0f3be44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -48,6 +48,7 @@ import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed; import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -241,20 +242,22 @@ public void testCloseWhileRelocatingShards() throws Exception { ensureGreen(indices); for (String index : acknowledgedCloses) { - long docsCount = prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value; - assertEquals( - "Expected " - + docsPerIndex.get(index) - + " docs in index " - + index - + " but got " - + docsCount - + " (close acknowledged=" - + acknowledgedCloses.contains(index) - + ")", - (long) docsPerIndex.get(index), - 
docsCount - ); + assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { + long docsCount = response.getHits().getTotalHits().value; + assertEquals( + "Expected " + + docsPerIndex.get(index) + + " docs in index " + + index + + " but got " + + docsCount + + " (close acknowledged=" + + acknowledgedCloses.contains(index) + + ")", + (long) docsPerIndex.get(index), + docsCount + ); + }); } } finally { updateClusterSettings(Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index ec62a1cbbd9bf..a98297e8b49ae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -82,6 +82,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.emptyCollectionOf; @@ -150,8 +151,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data... 
- prepareSearch().addSort("field", SortOrder.ASC).get(); - prepareSearch().addSort("field", SortOrder.ASC).get(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -166,8 +167,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); // sort to load it to field data... - prepareSearch().addSort("field2", SortOrder.ASC).get(); - prepareSearch().addSort("field2", SortOrder.ASC).get(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); // now check the per field stats nodesStats = clusterAdmin().prepareNodesStats("data:true") @@ -264,8 +265,8 @@ public void testClearAllCaches() throws Exception { assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data and filter to load filter cache - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get(); - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -355,10 +356,7 @@ public void testQueryCache() throws Exception { assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), equalTo(0L)); assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L)); 
for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -389,10 +387,7 @@ public void testQueryCache() throws Exception { }); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -407,29 +402,13 @@ public void testQueryCache() throws Exception { // test explicit request parameter - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(false) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(false), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -440,24 
+419,13 @@ public void testQueryCache() throws Exception { indicesAdmin().prepareClearCache().setRequestCache(true).get(); // clean the cache updateIndexSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false), "idx"); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -983,7 +951,7 @@ public void testGroupsParam() throws Exception { prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").get(); refresh(); - prepareSearch("_all").setStats("bar", "baz").get(); + prepareSearch("_all").setStats("bar", "baz").get().decRef(); IndicesStatsRequestBuilder builder = indicesAdmin().prepareStats(); IndicesStatsResponse stats = builder.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index e53bcb0480d7b..0e14d80aaa0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -43,7 +43,6 @@ import 
org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -80,6 +79,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -133,7 +133,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20L); logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); @@ -155,7 +155,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count again..."); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocationWhileIndexingRandom() throws Exception { @@ -229,35 +229,31 @@ public void testRelocationWhileIndexingRandom() throws Exception { logger.info("--> refreshing the index"); indicesAdmin().prepareRefresh("test").get(); logger.info("--> searching the index"); - boolean ranOnce = false; for (int i = 0; i < 10; i++) { + final int idx = i; logger.info("--> 
START search test round {}", i + 1); - SearchHits hits = prepareSearch("test").setQuery(matchAllQuery()) - .setSize((int) indexer.totalIndexedDocs()) - .storedFields() - .get() - .getHits(); - ranOnce = true; - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { - int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; - for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { - hitIds[hit] = hit + 1; - } - Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); - for (SearchHit hit : hits.getHits()) { - int id = Integer.parseInt(hit.getId()); - if (set.remove(id) == false) { - logger.error("Extra id [{}]", id); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), + response -> { + var hits = response.getHits(); + if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; + for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { + hitIds[hit] = hit + 1; + } + Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); + for (SearchHit hit : hits.getHits()) { + int id = Integer.parseInt(hit.getId()); + if (set.remove(id) == false) { + logger.error("Extra id [{}]", id); + } + } + set.forEach(value -> logger.error("Missing id [{}]", value)); } + assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + logger.info("--> DONE search test round {}", idx + 1); } - set.forEach(value -> logger.error("Missing id [{}]", value)); - } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); - logger.info("--> DONE search test round {}", i + 1); - - } - if (ranOnce == false) { - fail(); + ); } } } @@ -570,7 +566,7 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + 
assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { @@ -636,7 +632,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); }, 1, TimeUnit.MINUTES); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(120L)); + assertHitCount(prepareSearch("test").setSize(0), 120); } public void testRelocationEstablishedPeerRecoveryRetentionLeases() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 442a2dc99bda3..8fb56d17b93ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentFactory; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; @@ -116,45 +117,24 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat( - 
prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> indexing with id [2], and routing [1] using alias"); @@ -162,111 +142,50 @@ public void testAliasSearchRouting() throws 
Exception { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with 0 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 1 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - 
assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 0,1 indexRoutings , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting("0", "1") - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with two routing aliases , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) 
- ); - assertThat( - prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias0, alias1 and alias01, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias0", "alias1", "alias01").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with test, alias0 and alias1, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("test", "alias0", "alias1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -316,43 +235,20 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a1,alias-b0, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a1", 
"alias-b0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch("alias-a1", "alias-b0").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with alias-ab, should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias-a0,alias-b1 should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias-a0", "alias-b1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -374,7 +270,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, 
equalTo(2L)); + assertHitCount(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -420,11 +316,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> creating alias with routing [4]"); @@ -432,11 +325,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying search with wrong routing should not find"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> creating alias with search routing [3,4] and index routing 4"); @@ -453,11 +343,8 @@ public void testIndexingAliasesOverTime() throws Exception { for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); assertThat(client().prepareGet("test", "1").setRouting("4").get().isExists(), equalTo(true)); - 
assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java index 772d8767b7dd0..f59ec4d42089e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentFactory; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -134,36 +135,19 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1L); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + 
assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } String secondRoutingValue = "1"; @@ -176,86 +160,42 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with {} routing, should find one", routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); 
+ assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {} routing, should find one", secondRoutingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {},{} indexRoutings , should find two", routingValue, "1"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount(prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount( + prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); } logger.info("--> search with {},{},{} indexRoutings , should find two", routingValue, secondRoutingValue, routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount( + prepareSearch().setRouting(routingValue, secondRoutingValue, 
routingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); - assertThat( + assertHitCount( prepareSearch().setSize(0) .setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + .setQuery(QueryBuilders.matchAllQuery()), + 2 ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 19dfe598b5318..aaf218e3579be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -169,21 +169,25 @@ public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exceptio logger.info("Executing search"); TimeValue keepAlive = TimeValue.timeValueSeconds(5); + String scrollId; SearchResponse searchResponse = prepareSearch("test").setScroll(keepAlive) .setSize(2) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .get(); + try { + assertNotNull(searchResponse.getScrollId()); - assertNotNull(searchResponse.getScrollId()); + // Enable block so the second request would block + for (ScriptedBlockPlugin plugin : plugins) { + plugin.reset(); + plugin.enableBlock(); + } - // Enable block so the second request would block - for (ScriptedBlockPlugin plugin : plugins) { - plugin.reset(); - plugin.enableBlock(); + scrollId = searchResponse.getScrollId(); + logger.info("Executing scroll with id {}", scrollId); + } finally { + searchResponse.decRef(); } - - String scrollId = searchResponse.getScrollId(); - logger.info("Executing scroll with id {}", scrollId); ActionFuture scrollResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(keepAlive) .execute(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index e18c37aff783b..d4a4debbd61d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -147,9 +147,11 @@ public void testDfsQueryThenFetch() throws Exception { ); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -184,9 +186,11 @@ public void testDfsQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -214,9 +218,11 @@ public void testQueryThenFetch() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -227,26 +233,29 @@ public void testQueryThenFetchWithFrom() throws Exception { Set collectedIds = new TreeSet<>(); - SearchResponse searchResponse = client().search( - new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH) - ).actionGet(); - 
assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(60)); - for (int i = 0; i < 60; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - searchResponse = client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)) - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(40)); - for (int i = 0; i < 40; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - assertThat(collectedIds, equalTo(fullExpectedIds)); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(60)); + for (int i = 0; i < 60; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + } + ); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(40)); + for (int i = 0; i < 40; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + assertThat(collectedIds, equalTo(fullExpectedIds)); + } + ); } public void testQueryThenFetchWithSort() throws Exception { @@ -272,9 +281,11 @@ public void testQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += 
hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -327,23 +338,27 @@ public void testFailedSearchWithWrongFrom() throws Exception { logger.info("Start Testing failed search with wrong from"); SearchSourceBuilder source = searchSource().query(termQuery("multi", "test")).from(1000).size(20).explain(true); - SearchResponse response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertThat(response.getHits().getHits().length, equalTo(0)); - assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); - assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); - assertThat(response.getFailedShards(), equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse(client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), response -> { + assertThat(response.getHits().getHits().length, equalTo(0)); + assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); + assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); + assertThat(response.getFailedShards(), equalTo(0)); + }); + + assertNoFailuresAndResponse( + client().search(new 
SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); logger.info("Done Testing failed search"); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 379cdfc990207..d21619f4e6f89 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -372,7 +372,10 @@ public void testClusterDetailsAfterCCSWithFailuresOnRemoteClusterOnly() throws E boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); // dfs=true overrides the minimize_roundtrips=true setting and does not minimize roundtrips @@ -612,7 +615,10 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); if 
(skipUnavailable == false || minimizeRoundtrips == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 5c189c0c6c96a..ab72dbd4db707 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -378,8 +378,7 @@ public void testEnsureNoNegativeOffsets() throws Exception { assertNotHighlighted( prepareSearch().setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) - .get(), + .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")), 0, "no_long_term" ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index c608c253c851b..c67bdf82b5c2c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -803,7 +803,7 @@ public void testFromSize() throws Exception { request.setSize(4); request.addRescorer(new QueryRescorerBuilder(matchAllQuery()), 50); - assertEquals(4, request.get().getHits().getHits().length); + assertResponse(request, response -> assertEquals(4, response.getHits().getHits().length)); } public void testRescorePhaseWithInvalidSort() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 2d77e170abdc5..2d6bb8176b091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -447,8 +447,13 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio } public void testEquivalence() { - - final int numDocs = (int) prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); + final int numDocs; + try { + numDocs = (int) response.getHits().getTotalHits().value; + } finally { + response.decRef(); + } int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 20b9ce38254c3..433f004acdd77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -232,13 +232,13 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { final String customPreference = randomAlphaOfLength(10); - final String nodeId = prepareSearch("test").setQuery(matchAllQuery()) - .setPreference(customPreference) - .get() - .getHits() - .getAt(0) - .getShard() - .getNodeId(); + final String nodeId; + var response = prepareSearch("test").setQuery(matchAllQuery()).setPreference(customPreference).get(); + try { + nodeId = response.getHits().getAt(0).getShard().getNodeId(); + } finally { + response.decRef(); + } assertSearchesSpecificNode("test", customPreference, nodeId); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 1362b0166a709..816fe48e5d97f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -65,7 +65,7 @@ public void testNodeSelection() { // Now after more searches, we should select a node with the lowest ARS rank. for (int i = 0; i < 5; i++) { - client.prepareSearch().setQuery(matchAllQuery()).get(); + client.prepareSearch().setQuery(matchAllQuery()).get().decRef(); } ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index e89e51a60fa23..036467b8d0774 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -37,56 +38,61 @@ public class DuelScrollIT extends ESIntegTestCase { public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); - SearchResponse control = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - 
.setSize(context.numDocs) - .get(); - assertNoFailures(control); - SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(sh.getHits().length, equalTo(context.numDocs)); + assertNoFailuresAndResponse( + prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), + control -> { + SearchHits sh = control.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getHits().length, equalTo(context.numDocs)); - SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - .setSize(context.scrollRequestSize) - .setScroll("10m") - .get(); + SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) + .addSort(context.sort) + .setSize(context.scrollRequestSize) + .setScroll("10m") + .get(); + try { - assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); - int counter = 0; - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } + int counter = 0; + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } - int iter = 1; - String scrollId = searchScrollResponse.getScrollId(); - while (true) { - searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); - assertNoFailures(searchScrollResponse); - 
assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - if (searchScrollResponse.getHits().getHits().length == 0) { - break; - } + int iter = 1; + String scrollId = searchScrollResponse.getScrollId(); + while (true) { + searchScrollResponse.decRef(); + searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + if (searchScrollResponse.getHits().getHits().length == 0) { + break; + } - int expectedLength; - int scrollSlice = ++iter * context.scrollRequestSize; - if (scrollSlice <= context.numDocs) { - expectedLength = context.scrollRequestSize; - } else { - expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); - } - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } - scrollId = searchScrollResponse.getScrollId(); - } + int expectedLength; + int scrollSlice = ++iter * context.scrollRequestSize; + if (scrollSlice <= context.numDocs) { + expectedLength = context.scrollRequestSize; + } else { + expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); + } + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } + scrollId = searchScrollResponse.getScrollId(); + } - assertThat(counter, equalTo(context.numDocs)); - clearScroll(scrollId); + assertThat(counter, equalTo(context.numDocs)); + clearScroll(scrollId); + } finally { + searchScrollResponse.decRef(); + } + } + ); } private TestContext create(SearchType... 
searchTypes) throws Exception { @@ -213,47 +219,51 @@ private int createIndex(boolean singleShard) throws Exception { private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int numDocs) throws Exception { final int size = scaledRandomIntBetween(5, numDocs + 5); - final SearchResponse control = prepareSearch("test").setSearchType(searchType) - .setSize(numDocs) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .get(); - assertNoFailures(control); + assertNoFailuresAndResponse( + prepareSearch("test").setSearchType(searchType) + .setSize(numDocs) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores), + control -> { - SearchResponse scroll = prepareSearch("test").setSearchType(searchType) - .setSize(size) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .setScroll("10m") - .get(); + SearchResponse scroll = prepareSearch("test").setSearchType(searchType) + .setSize(size) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores) + .setScroll("10m") + .get(); - int scrollDocs = 0; - try { - while (true) { - assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); - assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); - if (scroll.getHits().getHits().length == 0) { - break; + int scrollDocs = 0; + try { + while (true) { + assertNoFailures(scroll); + assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); + if (scroll.getHits().getHits().length == 0) { + break; + } + for (int i = 0; i < scroll.getHits().getHits().length; ++i) { + SearchHit 
controlHit = control.getHits().getAt(scrollDocs + i); + SearchHit scrollHit = scroll.getHits().getAt(i); + assertEquals(controlHit.getId(), scrollHit.getId()); + } + scrollDocs += scroll.getHits().getHits().length; + scroll.decRef(); + scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); + } + assertEquals(control.getHits().getTotalHits().value, scrollDocs); + } catch (AssertionError e) { + logger.info("Control:\n{}", control); + logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); + throw e; + } finally { + clearScroll(scroll.getScrollId()); + scroll.decRef(); } - for (int i = 0; i < scroll.getHits().getHits().length; ++i) { - SearchHit controlHit = control.getHits().getAt(scrollDocs + i); - SearchHit scrollHit = scroll.getHits().getAt(i); - assertEquals(controlHit.getId(), scrollHit.getId()); - } - scrollDocs += scroll.getHits().getHits().length; - scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); - } catch (AssertionError e) { - logger.info("Control:\n{}", control); - logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); - throw e; - } finally { - clearScroll(scroll.getScrollId()); - } + ); } public void testDuelIndexOrderQueryThenFetch() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index e8b3cfdb1768a..28723a09355a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -47,8 +47,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -92,6 +94,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -100,6 +103,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -109,6 +113,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { } } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -146,6 +151,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } for (int i = 0; i < 32; i++) { + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -156,6 +162,7 @@ public void 
testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // and now, the last one is one + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -165,6 +172,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // a the last is zero + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -175,6 +183,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -196,11 +205,11 @@ public void testScrollAndUpdateIndex() throws Exception { indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); + 
assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("user:kimchy")) .setSize(35) @@ -214,23 +223,19 @@ public void testScrollAndUpdateIndex() throws Exception { map.put("message", "update"); prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); } while (searchResponse.getHits().getHits().length > 0); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -246,12 +251,24 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + 
SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -259,36 +276,36 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = 
client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll() @@ -361,12 +378,24 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -374,36 +403,36 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : 
searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll().addScrollId("_all").get(); @@ -447,6 +476,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { if (scrollId != null) { clearScroll(scrollId); } + response.decRef(); } } } @@ -456,12 +486,16 @@ public void testThatNonExistingScrollIdReturnsCorrectException() throws Exceptio refresh(); SearchResponse searchResponse = prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + try { + assertThat(searchResponse.getScrollId(), is(notNullValue())); - ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); - assertThat(clearScrollResponse.isSucceeded(), is(true)); + ClearScrollResponse 
clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); + assertThat(clearScrollResponse.isSucceeded(), is(true)); - assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + } finally { + searchResponse.decRef(); + } } public void testStringSortMissingAscTerminates() throws Exception { @@ -471,30 +505,29 @@ public void testStringSortMissingAscTerminates() throws Exception { prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); - SearchResponse response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertNoSearchHits(response); - - response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertResponse( + prepareSearch("test").addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertNoFailuresAndResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertNoSearchHits(response2); + }); + } + ); - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse( + prepareSearch("test").addSort(new 
FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertThat(response2.getHits().getHits().length, equalTo(0)); + }); + } + ); } public void testCloseAndReopenOrDeleteWithActiveScroll() { @@ -503,17 +536,17 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setSize(35) - .setScroll(TimeValue.timeValueMinutes(2)) - .addSort("field", SortOrder.ASC) - .get(); - long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); - } + assertResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), + searchResponse -> { + long counter = 0; + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + } + ); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose("test")); assertAcked(indicesAdmin().prepareOpen("test")); @@ -572,18 +605,18 @@ public void testInvalidScrollKeepAlive() throws IOException { assertNotNull(illegalArgumentException); assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (2h) is too large")); - SearchResponse searchResponse = 
prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)).get(); - assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - - exc = expectThrows( - Exception.class, - () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() - ); - illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap(exc, IllegalArgumentException.class); - assertNotNull(illegalArgumentException); - assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (3h) is too large")); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { + assertNotNull(searchResponse.getScrollId()); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + Exception ex = expectThrows( + Exception.class, + () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() + ); + IllegalArgumentException iae = (IllegalArgumentException) ExceptionsHelper.unwrap(ex, IllegalArgumentException.class); + assertNotNull(iae); + assertThat(iae.getMessage(), containsString("Keep alive for request (3h) is too large")); + }); } /** @@ -614,13 +647,18 @@ public void testScrollRewrittenToMatchNoDocs() { assertNoFailures(resp); while (resp.getHits().getHits().length > 0) { totalHits += resp.getHits().getHits().length; - resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + final String scrollId = resp.getScrollId(); + resp.decRef(); + resp = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(1)).get(); assertNoFailures(resp); } assertThat(totalHits, equalTo(2)); } finally { - if (resp != 
null && resp.getScrollId() != null) { - client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + if (resp != null) { + if (resp.getScrollId() != null) { + client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + } + resp.decRef(); } } } @@ -635,26 +673,38 @@ public void testRestartDataNodesDuringScrollSearch() throws Exception { index("prod", "prod-" + i, Map.of()); } indicesAdmin().prepareRefresh().get(); + final String respFromDemoIndexScrollId; SearchResponse respFromDemoIndex = prepareSearch("demo").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); + try { + respFromDemoIndexScrollId = respFromDemoIndex.getScrollId(); + } finally { + respFromDemoIndex.decRef(); + } internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback()); ensureGreen("demo", "prod"); + final String respFromProdIndexScrollId; SearchResponse respFromProdIndex = prepareSearch("prod").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); - assertNoFailures(respFromProdIndex); + try { + assertNoFailures(respFromProdIndex); + respFromProdIndexScrollId = respFromProdIndex.getScrollId(); + } finally { + respFromProdIndex.decRef(); + } SearchPhaseExecutionException error = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearchScroll(respFromDemoIndex.getScrollId()).get() + () -> client().prepareSearchScroll(respFromDemoIndexScrollId).get() ); for (ShardSearchFailure shardSearchFailure : error.shardFailures()) { assertThat(shardSearchFailure.getCause().getMessage(), containsString("No search context found for id [1]")); } - client().prepareSearchScroll(respFromProdIndex.getScrollId()).get(); + client().prepareSearchScroll(respFromProdIndexScrollId).get().decRef(); } private void assertToXContentResponse(ClearScrollResponse response, boolean succeed, int numFreed) throws IOException { diff 
--git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 23a38c0608490..42be70e5ff8b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -62,30 +62,37 @@ public void testScanScrollWithShardExceptions() throws Exception { .setSize(10) .setScroll(TimeValue.timeValueMinutes(1)) .get(); - assertAllSuccessful(searchResponse); - long numHits = 0; - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + try { assertAllSuccessful(searchResponse); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, equalTo(100L)); - clearScroll("_all"); + long numHits = 0; + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertAllSuccessful(searchResponse); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, equalTo(100L)); + clearScroll("_all"); - internalCluster().stopRandomNonMasterNode(); + internalCluster().stopRandomNonMasterNode(); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); - numHits = 0; - int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, greaterThan(0L)); + searchResponse.decRef(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); + numHits = 0; + int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, greaterThan(0L)); - clearScroll(searchResponse.getScrollId()); + clearScroll(searchResponse.getScrollId()); + } finally { + searchResponse.decRef(); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 6219c1b72253a..d76031d402af0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -50,6 +50,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayWithSize; @@ -157,15 +159,18 @@ public void testWithNullStrings() throws InterruptedException { prepareIndex("test").setId("0").setSource("field1", 0), prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); - SearchResponse searchResponse = prepareSearch("test").addSort("field1", SortOrder.ASC) - .addSort("field2", SortOrder.ASC) - .setQuery(matchAllQuery()) - .searchAfter(new Object[] { 0, null }) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + assertResponse( + prepareSearch("test").addSort("field1", SortOrder.ASC) + .addSort("field2", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 0, null }), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + } + ); } public void testWithSimpleTypes() throws Exception { @@ -229,31 +234,36 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { .add(new IndexRequest("test").id("5").source("start_date", "2017-01-20", "end_date", "2025-05-28")) .get(); - SearchResponse resp = 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); - - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); + } + ); - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + assertNoFailuresAndResponse( + 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + } + ); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + } + ); SearchRequestBuilder searchRequest = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("epoch_millis")) @@ -332,11 +342,15 @@ private void assertSearchFromWithSortValues(String indexName, List> req.searchAfter(sortValues); } SearchResponse searchResponse = req.get(); - for (SearchHit hit : searchResponse.getHits()) { - List toCompare = convertSortValues(documents.get(offset++)); - assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + try { + for (SearchHit hit : searchResponse.getHits()) { + List toCompare = convertSortValues(documents.get(offset++)); + assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + } + sortValues = searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); + } finally { + searchResponse.decRef(); } - sortValues = 
searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); } } @@ -445,11 +459,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); foundHits++; } + resp.decRef(); resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(5)).get(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + resp.decRef(); } } // search_after with sort with point in time @@ -479,11 +495,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("Sorted by timestamp and pit tier breaker", after, arrayWithSize(2)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } @@ -512,12 +530,14 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("sorted by pit tie breaker", after, arrayWithSize(1)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); Collections.sort(foundSeqNos); assertThat(foundSeqNos, equalTo(timestamps)); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 527d8bed8bc68..93340bedbdae3 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -39,6 +39,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -111,8 +112,8 @@ public void testWithPreferenceAndRoutings() throws Exception { int numShards = 10; int totalDocs = randomIntBetween(100, 1000); setupIndex(totalDocs, numShards); - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0).get(); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -122,9 +123,9 @@ public void testWithPreferenceAndRoutings() throws Exception { .setPreference("_shards:1,4") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0).get(); + }); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -134,15 +135,15 @@ public void testWithPreferenceAndRoutings() throws Exception { .setRouting("foo", "bar") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - 
assertAcked( - indicesAdmin().prepareAliases() - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) - ); - SearchResponse sr = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0).get(); + }); + + assertAcked( + indicesAdmin().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) + ); + assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -151,7 +152,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } + }); } private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice, int numDocs) { @@ -160,27 +161,32 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f for (int id = 0; id < numSlice; id++) { SliceBuilder sliceBuilder = new SliceBuilder(field, id, numSlice); SearchResponse searchResponse = request.slice(sliceBuilder).get(); - totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - int numSliceResults = searchResponse.getHits().getHits().length; - String scrollId = searchResponse.getScrollId(); - for (SearchHit hit : 
searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } - while (searchResponse.getHits().getHits().length > 0) { - searchResponse = client().prepareSearchScroll("test") - .setScrollId(scrollId) - .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) - .get(); - scrollId = searchResponse.getScrollId(); + try { totalResults += searchResponse.getHits().getHits().length; - numSliceResults += searchResponse.getHits().getHits().length; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int numSliceResults = searchResponse.getHits().getHits().length; + String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { assertTrue(keys.add(hit.getId())); } + while (searchResponse.getHits().getHits().length > 0) { + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll("test") + .setScrollId(scrollId) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .get(); + scrollId = searchResponse.getScrollId(); + totalResults += searchResponse.getHits().getHits().length; + numSliceResults += searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + clearScroll(scrollId); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); - clearScroll(scrollId); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); @@ -222,24 +228,29 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie .setSize(randomIntBetween(10, 100)); SearchResponse searchResponse = request.get(); - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + try { + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - while (true) { - int numHits = 
searchResponse.getHits().getHits().length; - if (numHits == 0) { - break; - } + while (true) { + int numHits = searchResponse.getHits().getHits().length; + if (numHits == 0) { + break; + } - totalResults += numHits; - numSliceResults += numHits; - for (SearchHit hit : searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } + totalResults += numHits; + numSliceResults += numHits; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } - Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); - searchResponse = request.searchAfter(sortValues).get(); + Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); + searchResponse.decRef(); + searchResponse = request.searchAfter(sortValues).get(); + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 2967bdc454aed..4a10bf6cf8fab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -86,8 +86,8 @@ public void testWithRouting() { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - - response = prepareSearch("test").storedFields("_none_").get(); + }); + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); 
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 0d850a3708044..160cba19700ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -119,7 +119,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword")) .setSize(0) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after second query: {}", stats); @@ -148,7 +149,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .setQuery(QueryBuilders.rangeQuery("date_field").from("2016/01/01")) .setSize(100) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after third query: {}", stats); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index b5f7468d1645c..9ca565cef7843 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -58,6 +58,7 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; @@ -945,10 +946,11 @@ public void testThatStatsAreWorking() throws Exception { ensureGreen(); // load the fst index into ram prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f"))) - .get(); + .get() + .decRef(); prepareSearch(INDEX).suggest( new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")) - ).get(); + ).get().decRef(); // Get all stats IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -1278,7 +1280,7 @@ public void testPrunedSegments() throws IOException { refresh(); assertSuggestions("b"); - assertThat(2L, equalTo(prepareSearch(INDEX).setSize(0).get().getHits().getTotalHits().value)); + assertHitCount(prepareSearch(INDEX).setSize(0), 2); for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get(INDEX)) { ShardSegments[] shards = seg.shards(); for (ShardSegments shardSegments : shards) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 42c19a903b452..b04aa321f70f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -55,6 +55,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -953,7 +954,7 @@ public void testQueuedSnapshotsWaitingForShardReady() throws Exception { indexDoc(testIndex, Integer.toString(i), "foo", "bar" + i); } refresh(); - assertThat(prepareSearch(testIndex).setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch(testIndex).setSize(0), 100); logger.info("--> start relocations"); allowNodes(testIndex, 1); diff --git a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java index e6abe2f041a4c..a678956b20e59 100644 --- a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java @@ -26,6 +26,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -62,15 +63,17 @@ public void testKnnSearchRemovedVector() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 20, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .setSize(10) - .get(); - - // Originally indexed 20 documents, but deleted vector field with an update, so only 19 should 
be hit - assertHitCount(response, 19); - assertEquals(10, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .setSize(10), + response -> { + // Originally indexed 20 documents, but deleted vector field with an update, so only 19 should be hit + assertHitCount(response, 19); + assertEquals(10, response.getHits().getHits().length); + } + ); // Make sure we still have 20 docs assertHitCount(client().prepareSearch("index").setSize(0).setTrackTotalHits(true), 20); } @@ -104,19 +107,22 @@ public void testKnnWithQuery() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 15); - assertEquals(10, response.getHits().getHits().length); - - // Because of the boost, vector results should appear first - assertNotNull(response.getHits().getAt(0).field("vector")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 15); + assertEquals(10, response.getHits().getHits().length); + + // Because of the boost, vector results should appear first + assertNotNull(response.getHits().getAt(0).field("vector")); + } + ); } public void testKnnFilter() throws IOException { @@ -150,13 +156,13 @@ public void testKnnFilter() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).addFilterQuery( 
QueryBuilders.termsQuery("field", "second") ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); - for (SearchHit hit : response.getHits().getHits()) { - assertEquals("second", hit.field("field").getValue()); - } + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + for (SearchHit hit : response.getHits().getHits()) { + assertEquals("second", hit.field("field").getValue()); + } + }); } public void testKnnFilterWithRewrite() throws IOException { @@ -193,10 +199,10 @@ public void testKnnFilterWithRewrite() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).addFilterQuery( QueryBuilders.termsLookupQuery("field", new TermsLookup("index", "lookup-doc", "other-field")) ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + }); } public void testMultiKnnClauses() throws IOException { @@ -239,26 +245,29 @@ public void testMultiKnnClauses() throws IOException { float[] queryVector = randomVector(20f, 21f); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); KnnSearchBuilder knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null).boost(10.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - 
.setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 20); - assertEquals(10, response.getHits().getHits().length); - InternalStats agg = response.getAggregations().get("stats"); - assertThat(agg.getCount(), equalTo(20L)); - assertThat(agg.getMax(), equalTo(3.0)); - assertThat(agg.getMin(), equalTo(1.0)); - assertThat(agg.getAvg(), equalTo(2.25)); - assertThat(agg.getSum(), equalTo(45.0)); - - // Because of the boost & vector distributions, vector_2 results should appear first - assertNotNull(response.getHits().getAt(0).field("vector_2")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 20); + assertEquals(10, response.getHits().getHits().length); + InternalStats agg = response.getAggregations().get("stats"); + assertThat(agg.getCount(), equalTo(20L)); + assertThat(agg.getMax(), equalTo(3.0)); + assertThat(agg.getMin(), equalTo(1.0)); + assertThat(agg.getAvg(), equalTo(2.25)); + assertThat(agg.getSum(), equalTo(45.0)); + + // Because of the boost & vector distributions, vector_2 results should appear first + assertNotNull(response.getHits().getAt(0).field("vector_2")); + } + ); } public void testMultiKnnClausesSameDoc() throws IOException { @@ -298,38 +307,42 @@ public void testMultiKnnClausesSameDoc() throws IOException { // Having the same query vector and same docs should mean our KNN scores are linearly combined if the same doc is matched KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null); KnnSearchBuilder 
knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null); - SearchResponse responseOneKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - SearchResponse responseBothKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k matched docs - assertHitCount(responseOneKnn, 5); - assertHitCount(responseBothKnn, 5); - assertEquals(5, responseOneKnn.getHits().getHits().length); - assertEquals(5, responseBothKnn.getHits().getHits().length); - - for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { - SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; - SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; - assertThat(bothHit.getId(), equalTo(oneHit.getId())); - assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); - } - InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); - InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); - assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); - assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); - assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); - assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); - assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseOneKnn -> assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseBothKnn -> { + + // The total hits is k 
matched docs + assertHitCount(responseOneKnn, 5); + assertHitCount(responseBothKnn, 5); + assertEquals(5, responseOneKnn.getHits().getHits().length); + assertEquals(5, responseBothKnn.getHits().getHits().length); + + for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { + SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; + SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; + assertThat(bothHit.getId(), equalTo(oneHit.getId())); + assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); + } + InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); + InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); + assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); + assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); + assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); + assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); + assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + } + ) + ); } public void testKnnFilteredAlias() throws IOException { @@ -366,10 +379,11 @@ public void testKnnFilteredAlias() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 10, 50, null); - SearchResponse response = client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, expectedHits); - assertEquals(expectedHits, response.getHits().getHits().length); + final int expectedHitCount = expectedHits; + assertResponse(client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, expectedHitCount); + assertEquals(expectedHitCount, response.getHits().getHits().length); + }); } public void testKnnSearchAction() throws IOException { @@ -399,14 +413,14 @@ public void testKnnSearchAction() throws IOException { // Since there's no kNN search action at the transport layer, we just emulate // how the action works (it 
builds a kNN query under the hood) float[] queryVector = randomVector(); - SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)) - .setSize(2) - .get(); - - // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard - assertHitCount(response, 5 * 2); - assertEquals(2, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index1", "index2").setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)).setSize(2), + response -> { + // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard + assertHitCount(response, 5 * 2); + assertEquals(2, response.getHits().getHits().length); + } + ); } public void testKnnVectorsWith4096Dims() throws IOException { @@ -434,11 +448,11 @@ public void testKnnVectorsWith4096Dims() throws IOException { float[] queryVector = randomVector(4096); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 3, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 3); - assertEquals(3, response.getHits().getHits().length); - assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 3); + assertEquals(3, response.getHits().getHits().length); + assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + }); } private float[] randomVector() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 430e66c116744..a02eddf039e46 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -313,6 +313,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean latchTriggered = new AtomicBoolean(); var results = new ArraySearchPhaseResults(shardsIter.size()); + final TestSearchResponse testResponse = new TestSearchResponse(); try { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", @@ -335,7 +336,6 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY ) { - final TestSearchResponse response = new TestSearchResponse(); @Override protected void executePhaseOnShard( @@ -343,7 +343,7 @@ protected void executePhaseOnShard( SearchShardTarget shard, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), @@ -368,7 +368,7 @@ public void run() { assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); sendReleaseSearchContext(result.getContextId(), new MockConnection(result.node), OriginalIndices.NONE); } - responseListener.onResponse(response); + responseListener.onResponse(testResponse); if (latchTriggered.compareAndSet(false, true) == false) { throw new AssertionError("latch triggered twice"); } @@ -391,6 +391,7 @@ public void run() { final List runnables = executor.shutdownNow(); 
assertThat(runnables, equalTo(Collections.emptyList())); } finally { + testResponse.decRef(); results.decRef(); } } @@ -437,79 +438,82 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY); ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); - AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( - "test", - logger, - transportService, - (cluster, node) -> { - assert cluster == null : "cluster was not null: " + cluster; - return lookup.get(node); - }, - aliasFilters, - Collections.emptyMap(), - executor, - request, - responseListener, - shardsIter, - new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), - ClusterState.EMPTY_STATE, - null, - new ArraySearchPhaseResults<>(shardsIter.size()), - request.getMaxConcurrentShardRequests(), - SearchResponse.Clusters.EMPTY - ) { - final TestSearchResponse response = new TestSearchResponse(); - - @Override - protected void executePhaseOnShard( - SearchShardIterator shardIt, - SearchShardTarget shard, - SearchActionListener listener + final TestSearchResponse response = new TestSearchResponse(); + try { + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( + "test", + logger, + transportService, + (cluster, node) -> { + assert cluster == null : "cluster was not null: " + cluster; + return lookup.get(node); + }, + aliasFilters, + Collections.emptyMap(), + executor, + request, + responseListener, + shardsIter, + new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), + ClusterState.EMPTY_STATE, + null, + new ArraySearchPhaseResults<>(shardsIter.size()), + request.getMaxConcurrentShardRequests(), + SearchResponse.Clusters.EMPTY ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); 
- Transport.Connection connection = getConnection(null, shard.getNodeId()); - final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { - testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); - } else { - testSearchPhaseResult = new TestSearchPhaseResult( - new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), - connection.getNode() - ); - Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); - ids.add(testSearchPhaseResult.getContextId()); - } - if (randomBoolean()) { - listener.onResponse(testSearchPhaseResult); - } else { - new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + @Override + protected void executePhaseOnShard( + SearchShardIterator shardIt, + SearchShardTarget shard, + SearchActionListener listener + ) { + assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + Transport.Connection connection = getConnection(null, shard.getNodeId()); + final TestSearchPhaseResult testSearchPhaseResult; + if (shard.getShardId().id() == 0) { + testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); + } else { + testSearchPhaseResult = new TestSearchPhaseResult( + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), + connection.getNode() + ); + Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); + ids.add(testSearchPhaseResult.getContextId()); + } + if (randomBoolean()) { + listener.onResponse(testSearchPhaseResult); + } else { + new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + } } - } - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - throw new RuntimeException("boom"); - } - }; + @Override + protected SearchPhase 
getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase("test") { + @Override + public void run() { + throw new RuntimeException("boom"); + } + }; + } + }; + asyncAction.start(); + latch.await(); + assertNotNull(failure.get()); + assertThat(failure.get().getCause().getMessage(), containsString("boom")); + assertFalse(nodeToContextMap.isEmpty()); + assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); + assertEquals(shardsIter.size() - 1, numFreedContext.get()); + if (nodeToContextMap.containsKey(primaryNode)) { + assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); + } else { + assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); } - }; - asyncAction.start(); - latch.await(); - assertNotNull(failure.get()); - assertThat(failure.get().getCause().getMessage(), containsString("boom")); - assertFalse(nodeToContextMap.isEmpty()); - assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); - assertEquals(shardsIter.size() - 1, numFreedContext.get()); - if (nodeToContextMap.containsKey(primaryNode)) { - assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); - } else { - assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); + final List runnables = executor.shutdownNow(); + assertThat(runnables, equalTo(Collections.emptyList())); + } finally { + response.decRef(); } - final List runnables = executor.shutdownNow(); - assertThat(runnables, equalTo(Collections.emptyList())); } public void testAllowPartialResults() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index e250964c50687..dc6e69b15ee32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -77,7 +77,14 @@ public void init() { private void addResponse(SearchResponseMerger searchResponseMerger, SearchResponse searchResponse) { if (randomBoolean()) { - executorService.submit(() -> searchResponseMerger.add(searchResponse)); + searchResponse.incRef(); + executorService.submit(() -> { + try { + searchResponseMerger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + }); } else { searchResponseMerger.add(searchResponse); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index b02dea53bc8b9..b45a04922c187 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -271,7 +271,12 @@ static SearchResponse.Clusters createCCSClusterObject( * compare xContent, so we omit it here */ public void testFromXContent() throws IOException { - doFromXContentTestWithRandomFields(createTestItem(), false); + var response = createTestItem(); + try { + doFromXContentTestWithRandomFields(response, false); + } finally { + response.decRef(); + } } /** @@ -281,7 +286,12 @@ public void testFromXContent() throws IOException { * fields to SearchHits, Aggregations etc... 
is tested in their own tests */ public void testFromXContentWithRandomFields() throws IOException { - doFromXContentTestWithRandomFields(createMinimalTestItem(), true); + var response = createMinimalTestItem(); + try { + doFromXContentTestWithRandomFields(response, true); + } finally { + response.decRef(); + } } private void doFromXContentTestWithRandomFields(SearchResponse response, boolean addRandomFields) throws IOException { @@ -328,15 +338,15 @@ public void testFromXContentWithFailures() throws IOException { for (int i = 0; i < failures.length; i++) { failures[i] = ShardSearchFailureTests.createTestItem(IndexMetadata.INDEX_UUID_NA_VALUE); } + BytesReference originalBytes; SearchResponse response = createTestItem(failures); XContentType xcontentType = randomFrom(XContentType.values()); - final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - BytesReference originalBytes = toShuffledXContent( - ChunkedToXContent.wrapAsToXContent(response), - xcontentType, - params, - randomBoolean() - ); + try { + final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); + originalBytes = toShuffledXContent(ChunkedToXContent.wrapAsToXContent(response), xcontentType, params, randomBoolean()); + } finally { + response.decRef(); + } try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { SearchResponse parsed = SearchResponse.fromXContent(parser); try { @@ -388,26 +398,30 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - 
assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -428,34 +442,38 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, new SearchResponse.Clusters(5, 3, 2) ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "_clusters": { - "total": 5, - "successful": 3, - "skipped": 2, - "running":0, - "partial": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "_clusters": { + "total": 5, + "successful": 3, + "skipped": 2, + "running":0, + "partial": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -485,142 +503,154 @@ public void testToXContent() throws IOException { new ShardSearchFailure[] { new ShardSearchFailure(new IllegalStateException("corrupt index")) } ) ); - String expectedString = 
XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 20, - "successful": 9, - "skipped": 2, - "failed": 0 - }, - "_clusters": { - "total": 4, - "successful": 1, - "skipped": 1, - "running":0, - "partial": 1, - "failed": 1, - "details": { - "(local)": { - "status": "successful", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 5, - "skipped": 1, - "failed": 0 - } - }, - "cluster_1": { - "status": "skipped", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } - } - ] + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 20, + "successful": 9, + "skipped": 2, + "failed": 0 }, - "cluster_2": { - "status": "failed", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" + "_clusters": { + "total": 4, + "successful": 1, + "skipped": 1, + "running":0, + "partial": 1, + "failed": 1, + "details": { + "(local)": { + "status": "successful", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "skipped": 1, + "failed": 0 } + }, + "cluster_1": { + "status": "skipped", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_2": { + "status": "failed", 
+ "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_0": { + "status": "partial", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 4, + "skipped": 1, + "failed": 1 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] } - ] + } }, - "cluster_0": { - "status": "partial", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 4, - "skipped": 1, - "failed": 1 + "hits": { + "total": { + "value": 100, + "relation": "eq" }, - "failures": [ + "max_score": 1.5, + "hits": [ { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } + "_id": "id1", + "_score": 2.0 } ] } - } - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ - { - "_id": "id1", - "_score": 2.0 - } - ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } } public void testSerialization() throws IOException { SearchResponse searchResponse = createTestItem(false); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - TransportVersion.current() - ); - if (searchResponse.getHits().getTotalHits() == null) { - assertNull(deserialized.getHits().getTotalHits()); - } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, 
deserialized.getHits().getTotalHits().relation); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + if (searchResponse.getHits().getTotalHits() == null) { + assertNull(deserialized.getHits().getTotalHits()); + } else { + assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); + assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + } + assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); + assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); + assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); + assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); + assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); + assertEquals(searchResponse.getClusters(), deserialized.getClusters()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); } - assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); - assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); - assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); - assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); - assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); - assertEquals(searchResponse.getClusters(), deserialized.getClusters()); } public void testToXContentEmptyClusters() throws IOException { @@ -634,15 +664,23 @@ public void testToXContentEmptyClusters() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - 
TransportVersion.current() - ); - XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); - deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals(0, Strings.toString(builder).length()); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); + deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals(0, Strings.toString(builder).length()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); + } } public void testClustersHasRemoteCluster() { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 70bd2d9f00a05..1097174628e58 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -83,7 +83,12 @@ public void search(final SearchRequest request, final ActionListener 1L, SearchResponse.Clusters.EMPTY)); + var response = SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY); + try { + listener.onResponse(response); + } finally { + response.decRef(); + } } @Override @@ -161,18 +166,21 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + var response = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY ); + try { + 
listener.onResponse(response); + } finally { + response.decRef(); + } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 366161881d30f..7090d590a4901 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -542,7 +542,7 @@ public void testCCSRemoteReduceMergeFails() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -584,9 +584,12 @@ public void testCCSRemoteReduce() throws Exception { SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + final SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.incRef(); + response.set(newValue); + }), latch ); TransportSearchAction.ccsRemoteReduce( @@ -608,18 +611,25 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, 
searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } { SearchRequest searchRequest = new SearchRequest(); @@ -650,7 +660,7 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -710,7 +720,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -728,9 +738,12 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -756,22 +769,26 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + 
resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals( - disconnectedNodesIndices.size(), - searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) - ); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - int successful = totalClusters - disconnectedNodesIndices.size(); - assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(successful == 0 ? 0 : successful + 1, searchResponse.getNumReducePhases()); + try { + assertEquals( + disconnectedNodesIndices.size(), + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) + ); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + int successful = totalClusters - disconnectedNodesIndices.size(); + assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(successful == 0 ? 
0 : successful + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } // give transport service enough time to realize that the node is down, and to notify the connection listeners @@ -794,7 +811,10 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SetOnce>> setOnce = new SetOnce<>(); AtomicReference response = new AtomicReference<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -820,18 +840,25 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } }); assertEquals(0, service.getConnectionManager().size()); } finally { @@ -841,6 +868,15 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti } } + private static void resolveWithEmptySearchResponse(Tuple> tuple) { + var resp = emptySearchResponse(); + try { + tuple.v2().onResponse(resp); + } finally { + resp.decRef(); + } + } + public void testCollectSearchShards() throws Exception { int numClusters = randomIntBetween(2, 10); DiscoveryNode[] nodes = new DiscoveryNode[numClusters]; diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index a0eff567274dc..6a87c0f704600 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesRequestCache; @@ -18,7 +17,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -42,23 +41,23 @@ public void testQueryRewriteOnRefresh() throws Exception { // Search for a range and check that it missed the cache (since its the // first time it has run) - final SearchResponse r1 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r1); - assertThat(r1.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(0, 1); // Search again and check it hits the cache - final SearchResponse r2 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r2); - assertThat(r2.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(1, 1); // Index 
some more documents in the query range and refresh @@ -67,13 +66,13 @@ public void testQueryRewriteOnRefresh() throws Exception { refreshIndex(); // Search again and check the request cache for another miss since request cache should be invalidated by refresh - final SearchResponse r3 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r3); - assertThat(r3.getHits().getTotalHits().value, equalTo(5L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r3 -> assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java index 2c4c620c057b2..143aab4e58c78 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; @@ -42,7 +41,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -204,30 +205,34 @@ public void testCardinalityAggregation() throws IOException { assertNoFailures(bulkResponse); // Test the root flattened field. - SearchResponse response = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")) - .get(); - - assertNoFailures(response); - Cardinality count = response.getAggregations().get("cardinality"); - assertCardinality(count, numDocs, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")), + response -> { + Cardinality count = response.getAggregations().get("cardinality"); + assertCardinality(count, numDocs, precisionThreshold); + } + ); // Test two keyed flattened fields. 
- SearchResponse firstResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")) - .get(); - assertNoFailures(firstResponse); - - Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); - assertCardinality(firstCount, numDocs, precisionThreshold); - - SearchResponse secondResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")) - .get(); - assertNoFailures(secondResponse); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")), + firstResponse -> { + + Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); + assertCardinality(firstCount, numDocs, precisionThreshold); + } + ); - Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); - assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")), + secondResponse -> { + Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); + assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + } + ); } private void assertCardinality(Cardinality count, long value, int precisionThreshold) { @@ -262,60 +267,56 @@ public void testTermsAggregation() throws IOException { // Aggregate on the root 'labels' field. 
TermsAggregationBuilder builder = createTermsAgg("labels"); - SearchResponse response = client().prepareSearch("test").addAggregation(builder).get(); - assertNoFailures(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(builder), response -> { + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(6)); - Terms.Bucket bucket1 = terms.getBuckets().get(0); - assertEquals("urgent", bucket1.getKey()); - assertEquals(5, bucket1.getDocCount()); + Terms.Bucket bucket1 = terms.getBuckets().get(0); + assertEquals("urgent", bucket1.getKey()); + assertEquals(5, bucket1.getDocCount()); - Terms.Bucket bucket2 = terms.getBuckets().get(1); - assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket2.getDocCount()); + Terms.Bucket bucket2 = terms.getBuckets().get(1); + assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket2.getDocCount()); + }); // Aggregate on the 'priority' subfield. 
TermsAggregationBuilder priorityAgg = createTermsAgg("labels.priority"); - SearchResponse priorityResponse = client().prepareSearch("test").addAggregation(priorityAgg).get(); - assertNoFailures(priorityResponse); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(priorityAgg), priorityResponse -> { + Terms priorityTerms = priorityResponse.getAggregations().get("terms"); + assertThat(priorityTerms, notNullValue()); + assertThat(priorityTerms.getName(), equalTo("terms")); + assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - Terms priorityTerms = priorityResponse.getAggregations().get("terms"); - assertThat(priorityTerms, notNullValue()); - assertThat(priorityTerms.getName(), equalTo("terms")); - assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - - Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); - assertEquals("urgent", priorityBucket.getKey()); - assertEquals(5, priorityBucket.getDocCount()); + Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); + assertEquals("urgent", priorityBucket.getKey()); + assertEquals(5, priorityBucket.getDocCount()); + }); // Aggregate on the 'release' subfield. 
TermsAggregationBuilder releaseAgg = createTermsAgg("labels.release"); - SearchResponse releaseResponse = client().prepareSearch("test").addAggregation(releaseAgg).get(); - assertNoFailures(releaseResponse); - - Terms releaseTerms = releaseResponse.getAggregations().get("terms"); - assertThat(releaseTerms, notNullValue()); - assertThat(releaseTerms.getName(), equalTo("terms")); - assertThat(releaseTerms.getBuckets().size(), equalTo(5)); - - for (Terms.Bucket bucket : releaseTerms.getBuckets()) { - assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket.getDocCount()); - } + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(releaseAgg), releaseResponse -> { + Terms releaseTerms = releaseResponse.getAggregations().get("terms"); + assertThat(releaseTerms, notNullValue()); + assertThat(releaseTerms.getName(), equalTo("terms")); + assertThat(releaseTerms.getBuckets().size(), equalTo(5)); + + for (Terms.Bucket bucket : releaseTerms.getBuckets()) { + assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket.getDocCount()); + } + }); // Aggregate on the 'priority' subfield with a min_doc_count of 0. 
TermsAggregationBuilder minDocCountAgg = createTermsAgg("labels.priority").minDocCount(0); - SearchResponse minDocCountResponse = client().prepareSearch("test").addAggregation(minDocCountAgg).get(); - assertNoFailures(minDocCountResponse); - - Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); - assertThat(minDocCountTerms, notNullValue()); - assertThat(minDocCountTerms.getName(), equalTo("terms")); - assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(minDocCountAgg), minDocCountResponse -> { + Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); + assertThat(minDocCountTerms, notNullValue()); + assertThat(minDocCountTerms.getName(), equalTo("terms")); + assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + }); } private TermsAggregationBuilder createTermsAgg(String field) { @@ -339,19 +340,22 @@ public void testLoadDocValuesFields() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key").get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertNoFailuresAndResponse( + client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key"), + response -> { + assertHitCount(response, 1); - Map fields = response.getHits().getAt(0).getFields(); + Map fields = response.getHits().getAt(0).getFields(); - DocumentField field = fields.get("flattened"); - assertEquals("flattened", field.getName()); - assertEquals(Arrays.asList("other_value", "value"), field.getValues()); + DocumentField field = fields.get("flattened"); + assertEquals("flattened", field.getName()); + assertEquals(Arrays.asList("other_value", "value"), field.getValues()); - DocumentField keyedField = fields.get("flattened.key"); - assertEquals("flattened.key", keyedField.getName()); - assertEquals("value", keyedField.getValue()); 
+ DocumentField keyedField = fields.get("flattened.key"); + assertEquals("flattened.key", keyedField.getName()); + assertEquals("value", keyedField.getValue()); + } + ); } public void testFieldSort() throws Exception { @@ -386,20 +390,22 @@ public void testFieldSort() throws Exception { .setSource(XContentFactory.jsonBuilder().startObject().startObject("flattened").field("other_key", "E").endObject().endObject()) .get(); - SearchResponse response = client().prepareSearch("test").addSort("flattened", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "1", "2"); - - response = client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "2", "1", "3"); - - response = client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "2", "1"); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "1", "2"); + }); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "2", "1", "3"); + }); + + assertNoFailuresAndResponse( + client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")), + response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "2", "1"); + } + ); } public void testSourceFiltering() { @@ -410,23 +416,32 @@ public void testSourceFiltering() { prepareIndex("test").setId("1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).setSource(source).get(); - SearchResponse response = 
client().prepareSearch("test").setFetchSource(true).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); + assertResponse( + client().prepareSearch("test").setFetchSource(true), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) + ); // Check 'include' filtering. - response = client().prepareSearch("test").setFetchSource("headers", null).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); - - response = client().prepareSearch("test").setFetchSource("headers.content-type", null).get(); - Map filteredSource = Collections.singletonMap( - "headers", - Collections.singletonMap("content-type", "application/json") + assertResponse( + client().prepareSearch("test").setFetchSource("headers", null), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) ); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + + assertResponse(client().prepareSearch("test").setFetchSource("headers.content-type", null), response -> { + Map filteredSource = Collections.singletonMap( + "headers", + Collections.singletonMap("content-type", "application/json") + ); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + }); // Check 'exclude' filtering. 
- response = client().prepareSearch("test").setFetchSource(null, "headers.content-type").get(); - filteredSource = Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co")); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + assertResponse( + client().prepareSearch("test").setFetchSource(null, "headers.content-type"), + response -> assertThat( + response.getHits().getAt(0).getSourceAsMap(), + equalTo(Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co"))) + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 270ab3003a1f1..aa787e6343654 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -137,6 +137,7 @@ import static org.elasticsearch.search.SearchService.SEARCH_WORKER_THREADS_ENABLED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; @@ -241,8 +242,10 @@ protected Settings nodeSettings() { public void testClearOnClose() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> 
assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -253,8 +256,10 @@ public void testClearOnClose() { public void testClearOnStop() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -265,8 +270,10 @@ public void testClearOnStop() { public void testClearIndexDelete() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -479,8 +486,10 @@ public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws Executi public void testBeforeShardLockDuringShardCreate() { IndexService indexService = createIndex("index", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), 
is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -787,9 +796,9 @@ public void testMaxOpenScrollContexts() throws Exception { LinkedList clearScrollIds = new LinkedList<>(); for (int i = 0; i < SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY); i++) { - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - - if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + assertResponse(client().prepareSearch("index").setSize(1).setScroll("1m"), searchResponse -> { + if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + }); } ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); @@ -797,7 +806,7 @@ public void testMaxOpenScrollContexts() throws Exception { client().clearScroll(clearScrollRequest); for (int i = 0; i < clearScrollIds.size(); i++) { - client().prepareSearch("index").setSize(1).setScroll("1m").get(); + client().prepareSearch("index").setSize(1).setScroll("1m").get().decRef(); } final ShardScrollRequestTest request = new ShardScrollRequestTest(indexShard.shardId()); @@ -1433,7 +1442,7 @@ public void testDeleteIndexWhileSearch() throws Exception { latch.countDown(); while (stopped.get() == false) { try { - client().prepareSearch("test").setRequestCache(false).get(); + client().prepareSearch("test").setRequestCache(false).get().decRef(); } catch (Exception ignored) { return; } @@ -1635,20 +1644,27 @@ public void testCancelFetchPhaseEarly() throws Exception { service.setOnCreateSearchContext(c -> searchContextCreated.set(true)); // Test fetch phase is cancelled early - String scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - 
.getScrollId(); + String scrollId; + var searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } - client().searchScroll(new SearchScrollRequest(scrollId)).get(); + client().searchScroll(new SearchScrollRequest(scrollId)).get().decRef(); assertThat(searchContextCreated.get(), is(true)); ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(scrollId); client().clearScroll(clearScrollRequest); - scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - .getScrollId(); + searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } searchContextCreated.set(false); service.setOnCheckCancelled(t -> { SearchShardTask task = new SearchShardTask(randomLong(), "transport", "action", "", TaskId.EMPTY_TASK_ID, emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java index cfa0087731b60..779e0ad28433a 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -24,6 +23,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public class GeoPointShapeQueryTests extends BasePointShapeQueryTestCase { @@ -78,8 +78,7 @@ public void testFieldAlias() throws IOException { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)), 1); } private final DatelinePointShapeQueryTestCase dateline = new DatelinePointShapeQueryTestCase(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 19f0d1e2e88a0..c5d5ecc1f90e8 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -766,16 +766,22 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { assertEquals(shards, restoreSnapshotResponse.getRestoreInfo().totalShards()); client().search( new SearchRequest("restored_" + index).source(new SearchSourceBuilder().size(0).trackTotalHits(true)), - searchResponseListener + searchResponseListener.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + }) ); }); deterministicTaskQueue.runAllRunnableTasks(); - assertEquals( - documentsFirstSnapshot + documentsSecondSnapshot, - Objects.requireNonNull(safeResult(searchResponseListener).getHits().getTotalHits()).value - ); + var response = safeResult(searchResponseListener); + try { + assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + } finally { + response.decRef(); + } + assertThat(safeResult(deleteSnapshotStepListener).isAcknowledged(), 
is(true)); assertThat(safeResult(restoreSnapshotResponseListener).getRestoreInfo().failedShards(), is(0)); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index a2499c06d6ccc..71030358e901f 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -333,7 +333,13 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t logger.info("--> add random documents to {}", index); addRandomDocuments(index, randomIntBetween(10, 1000)); } else { - int docCount = (int) prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(index).setSize(0).get(); + final int docCount; + try { + docCount = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } int deleteCount = randomIntBetween(1, docCount); logger.info("--> delete {} random documents from {}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { @@ -403,7 +409,12 @@ public void testMultipleSnapshotAndRollback() throws Exception { addRandomDocuments(indexName, docCount); } // Check number of documents in this iteration - docCounts[i] = (int) prepareSearch(indexName).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(indexName).setSize(0).get(); + try { + docCounts[i] = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); assertSuccessfulSnapshot( clusterAdmin().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName) diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 3f394c1384432..cef8d555b111d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -147,7 +147,8 @@ public void testIndexPointsCircle() throws Exception { try { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) - .get(); + .get() + .decRef(); } catch (Exception e) { assertThat( e.getCause().getMessage(), diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java index 58328671c58e8..cae57d5137acf 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java @@ -262,7 +262,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept // Set search.allow_expensive_queries to "null" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); - assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(builder, 1); // Set search.allow_expensive_queries to "true" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 570d583335a12..0b5b953df84fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -502,9 +502,14 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce } protected long getCountForIndex(String indexName) { - return client().search( + var resp = client().search( new SearchRequest(new SearchRequest(indexName).source(new SearchSourceBuilder().size(0).trackTotalHits(true))) - ).actionGet().getHits().getTotalHits().value; + ).actionGet(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } } protected void assertDocCount(String index, long count) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 2f6286092b535..e0083d5570baa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1064,18 +1064,17 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr if (lastKnownCount >= numDocs) { try { - long count = prepareSearch().setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get() - .getHits() - .getTotalHits().value; - - if (count == lastKnownCount) { - // no progress - try to refresh for the next time - indicesAdmin().prepareRefresh().get(); + var resp = prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()).get(); + try { + long count = resp.getHits().getTotalHits().value; + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + indicesAdmin().prepareRefresh().get(); + } + lastKnownCount = count; + } finally { + resp.decRef(); } - lastKnownCount = count; } catch (Exception e) { // count now acts like search and barfs if all shards failed... 
logger.debug("failed to executed count", e); throw e; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java index e6b263abd0f01..9c326f067caf7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.dataframe; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,6 +14,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -171,7 +172,11 @@ public QueryBuilder getParsedQuery() { if (exception instanceof RuntimeException runtimeException) { throw runtimeException; } else { - throw new ElasticsearchException(queryProvider.getParsingException()); + throw new ElasticsearchStatusException( + queryProvider.getParsingException().getMessage(), + RestStatus.BAD_REQUEST, + queryProvider.getParsingException() + ); } } return queryProvider.getParsedQuery(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 1dde9dc6075d0..4187762ca58c6 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.XContentType; @@ -189,10 +190,11 @@ protected void doRun() throws Exception { listener.onResponse(true); } else { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Attempt to put missing mapping in indices " + Arrays.toString(indicesThatRequireAnUpdate) - + " was not acknowledged" + + " was not acknowledged", + RestStatus.TOO_MANY_REQUESTS ) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index f9da4082dbfa2..b2a764b0be5b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -186,6 +187,15 @@ public boolean equals(Object obj) { && Objects.equals(authState, other.authState) && getTimeout().equals(other.getTimeout()); } + + @Override + public boolean match(Task task) { + if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) { + String taskId = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length()); + return taskId.equals(this.id); + } + return false; + } } public static class Response extends BaseTasksResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java index bcfe2b1728cbf..9c90a10e204f0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Request; import org.elasticsearch.xpack.core.transform.transforms.AuthorizationStateTests; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; @@ -74,4 +75,12 @@ protected Request mutateInstance(Request instance) { return new Request(update, id, deferValidation, timeout); } + + public void testMatch() { + Request request = new Request(randomTransformConfigUpdate(), "my-transform-7", false, null); + assertTrue(request.match(new 
AllocatedPersistentTask(123, "", "", "data_frame_my-transform-7", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-77", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "my-transform-7", null, null))); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java index aa28135787b5d..2dcb9c5dfe705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -37,6 +37,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + public class TransportCancelJobModelSnapshotUpgradeAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(TransportCancelJobModelSnapshotUpgradeAction.class); @@ -134,11 +136,11 @@ private void sendResponseOrFailure(ActionListener listener, AtomicArra + request.getJobId() + "]. Total failures [" + caughtExceptions.size() - + "], rethrowing first, all Exceptions: [" + + "], rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 53f6c19ce43f1..7b561ccaede2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -63,6 +63,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportCloseJobAction extends TransportTasksAction< JobTask, @@ -537,7 +538,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new CloseJobAction.Response(true)); return; } @@ -546,11 +547,11 @@ private static void sendResponseOrFailure( + jobId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java index 5aa85a6331c22..495d75b2de2cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java @@ -249,7 +249,17 @@ private static void handleFailure(Exception e, DeleteForecastAction.Request requ ); } } else { - listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + } } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java index ebe766f6b5669..767ec08078b42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import 
org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -120,8 +121,9 @@ protected void masterOperation( () -> format("[%s] failed to cleanup job after datafeed creation failure", request.getJobBuilder().getId()), deleteFailed ); - ElasticsearchException ex = new ElasticsearchException( + ElasticsearchStatusException ex = new ElasticsearchStatusException( "failed to cleanup job after datafeed creation failure", + RestStatus.REQUEST_TIMEOUT, failed ); ex.addSuppressed(deleteFailed); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java index c3d35fbc11593..42d36006acbde 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -58,6 +57,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static 
org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + /** * Stops the persistent task for running data frame analytics. */ @@ -297,7 +298,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDataFrameAnalyticsAction.Response(true)); return; } @@ -306,11 +307,11 @@ private static void sendResponseOrFailure( + analyticsId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index 2c9668a504b55..41359f5fcc166 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -58,6 +58,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static 
org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportStopDatafeedAction extends TransportTasksAction< TransportStartDatafeedAction.DatafeedTask, @@ -462,7 +463,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDatafeedAction.Response(true)); return; } @@ -471,11 +472,11 @@ private static void sendResponseOrFailure( + datafeedId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java index 6335e0b78bd83..3f6193c124a9a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -28,6 +28,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import 
org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -270,8 +271,9 @@ public void onFailure(Exception e) { @Override public void onTimeout(TimeValue timeout) { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "snapshot upgrader request [{}] [{}] timed out after [{}]", + RestStatus.REQUEST_TIMEOUT, params.getJobId(), params.getSnapshotId(), timeout diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java index 8df56d9df9c2f..6fce8aa20ed16 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java @@ -7,11 +7,12 @@ package org.elasticsearch.xpack.ml.aggs.categorization; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -123,12 +124,13 @@ public CategorizeTextAggregationBuilder(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the 
algorithm change boundary. if (in.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); @@ -279,12 +281,13 @@ protected CategorizeTextAggregationBuilder( protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (out.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } bucketCountThresholds.writeTo(out); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 0ed673ac5a365..9b337d559854a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -8,10 +8,11 @@ package org.elasticsearch.xpack.ml.aggs.categorization; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -109,12 +110,13 @@ public Bucket(SerializableTokenListCategory serializableCategory, long bucketOrd public Bucket(StreamInput in) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory = new SerializableTokenListCategory(in); @@ -127,12 +129,13 @@ public Bucket(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory.writeTo(out); @@ -239,12 +242,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.similarityThreshold = in.readVInt(); @@ -257,12 +261,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } out.writeVInt(similarityThreshold); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index 330327dc31a46..e61ffba9b3164 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.IndicesOptions; @@ -17,6 +18,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; @@ -109,7 +111,20 @@ public void runUpdate() { logger.debug(() -> "[" + update.getId() + "] datafeed successfully updated"); } catch (Exception ex) { logger.warn(() -> "[" + update.getId() + "] failed being updated", ex); - failures.add(new ElasticsearchException("Failed to update datafeed {}", ex, update.getId())); + if (ex instanceof ElasticsearchException elasticsearchException) { + failures.add( + new ElasticsearchStatusException( + "Failed to update datafeed {}", + elasticsearchException.status(), + elasticsearchException, + update.getId() + ) + ); + } else { + failures.add( + new ElasticsearchStatusException("Failed to update datafeed {}", RestStatus.REQUEST_TIMEOUT, ex, update.getId()) + ); + } } } if (failures.isEmpty()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 168b0deda87d4..cc59903436e2f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; @@ -115,15 +116,14 @@ public void run(String modelId) { } } catch (Exception e) { LOGGER.error(() -> format("[%s] Error running inference on model [%s]", config.getId(), modelId), e); - - if (e instanceof ElasticsearchException) { - Throwable rootCause = ((ElasticsearchException) e).getRootCause(); - throw new ElasticsearchException( + if (e instanceof ElasticsearchException elasticsearchException) { + throw new ElasticsearchStatusException( "[{}] failed running inference on model [{}]; cause was [{}]", - rootCause, + elasticsearchException.status(), + elasticsearchException.getRootCause(), config.getId(), modelId, - rootCause.getMessage() + elasticsearchException.getRootCause().getMessage() ); } throw ExceptionsHelper.serverError( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index 015b88552a1d0..3ace40e0deb6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; @@ -24,6 +24,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -238,13 +239,14 @@ static SearchResponse retryingSearch(Client client, String modelId, SearchReques * This intentionally prevents that code from attempting to retry loading the entire model. If the retry logic here * fails after the set retries we should not retry loading the entire model to avoid additional strain on the cluster. */ - throw new ElasticsearchException( + throw new ElasticsearchStatusException( format( "loading model [%s] failed after [%s] retries. The deployment is now in a failed state, " + "the error may be transient please stop the deployment and restart", modelId, retries ), + RestStatus.TOO_MANY_REQUESTS, e ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 5f796242e5bf8..d309ee2e5dc95 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -635,13 +635,19 @@ public void datafeedTimingStats( int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards(); if (CollectionUtils.isEmpty(shardFailures) == false) { LOGGER.error("[{}] Search request returned shard failures: {}", jobId, Arrays.toString(shardFailures)); - listener.onFailure(new ElasticsearchException(ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures))); + 
listener.onFailure( + new ElasticsearchStatusException( + ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures), + RestStatus.TOO_MANY_REQUESTS + ) + ); return; } if (unavailableShards > 0) { listener.onFailure( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; @@ -739,13 +745,19 @@ public void getAutodetectParams(Job job, String snapshotId, Consumer 0) { errorHandler.accept( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java index 49b02bdd6ae74..f124deecd9914 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java @@ -8,7 +8,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.task.JobTask; @@ -61,7 +62,7 @@ void tryLock() { throw ExceptionsHelper.serverError("Failed to acquire process lock for job [" + jobTask.getJobId() + "]"); } } catch 
(InterruptedException e) { - throw new ElasticsearchException(e); + throw new ElasticsearchStatusException(e.getMessage(), RestStatus.TOO_MANY_REQUESTS, e); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java index 427b7c9defa5a..917d5881ae130 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.client.internal.OriginSettingClient; @@ -19,6 +19,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.Annotation; @@ -100,7 +101,13 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired annotations for job [" + job.getId() + "]", e)); + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired annotations for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index ed4e6875e260a..424668a20bf05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -75,7 +77,15 @@ public void remove(float requestsPerSec, ActionListener listener, Boole LOGGER.debug("Removing forecasts that expire before [{}]", cutoffEpochMs); ActionListener forecastStatsHandler = ActionListener.wrap( searchResponse -> deleteForecasts(searchResponse, requestsPerSec, listener, isTimedOutSupplier), - e -> listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)) + e -> { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } ); SearchSourceBuilder source = new SearchSourceBuilder(); @@ -143,7 +153,19 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired 
forecasts", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired forecasts", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException("Failed to remove expired forecasts", RestStatus.TOO_MANY_REQUESTS, e) + ); + } } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 1854e3b752de3..507e9dac6282d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -230,7 +231,14 @@ public void onResponse(QueryPage searchResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("[{}] Search for expired snapshots failed", e, job.getId())); + listener.onFailure( + new ElasticsearchStatusException( + "[{}] Search for expired snapshots failed", + RestStatus.TOO_MANY_REQUESTS, +
e, + job.getId() + ) + ); } }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 537297d130789..db712def11eac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -115,7 +117,23 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired results for job [" + job.getId() + "]", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } } }); } diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java new file mode 100644 index 0000000000000..d60194918274e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * This file was contributed to by generative AI + */ + +package org.elasticsearch.xpack.ml.utils; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.rest.RestStatus; + +import java.util.List; + +public class ExceptionCollectionHandling { + + /** + * Given an array of exceptions, return a single ElasticsearchStatusException. + * Return the first exception if all exceptions have 4XX status. + * Otherwise, return a generic 500 error. 
+ * + * @param failures must not be empty or null + * @param message the message to use for the ElasticsearchStatusException + */ + public static ElasticsearchStatusException exceptionArrayToStatusException(AtomicArray failures, String message) { + + List caughtExceptions = failures.asList(); + if (caughtExceptions.isEmpty()) { + assert false : "method to combine exceptions called with no exceptions"; + return new ElasticsearchStatusException("No exceptions caught", RestStatus.INTERNAL_SERVER_ERROR); + } else { + + boolean allElasticsearchException = true; + boolean allStatus4xx = true; + + for (Exception exception : caughtExceptions) { + if (exception instanceof ElasticsearchException elasticsearchException) { + if (elasticsearchException.status().getStatus() < 400 || elasticsearchException.status().getStatus() >= 500) { + allStatus4xx = false; + } + } else { + allElasticsearchException = false; + break; + } + } + + if (allElasticsearchException && allStatus4xx) { + return new ElasticsearchStatusException( + message, + ((ElasticsearchException) caughtExceptions.get(0)).status(), + caughtExceptions.get(0) + ); + } else { + return new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + } + + } + + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index 5fa434e530bc5..e87fbf48ca421 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ActionListener; @@ -192,8 +191,9 @@ public void bulkIndexWithRetry( ) { if (isShutdown || isResetMode) { finalListener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ) ); @@ -233,8 +233,9 @@ private BulkResponse bulkIndexWithRetry( BiConsumer> actionExecutor ) { if (isShutdown || isResetMode) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ); }