Reduce performance impact of ExitableDirectoryReader (#53978) (#54014)

Benchmarking showed that the performance impact of the ExitableDirectoryReader
is reduced considerably when the cancellation check runs only once every
8191 docs. Moreover, set the cancellable task before calling
QueryPhase#preProcess() and make sure we don't wrap with an
ExitableDirectoryReader at all when lowLevelCancellation is set to false,
so that there is no performance impact whatsoever in that case.
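
To make the "check every 8191 docs" idea concrete, here is a small self-contained sketch of the sampling pattern. It is an illustration only, not the ExitableDirectoryReader code itself, and the class and method names are invented for the example: a power-of-two-minus-one constant is used as a bitmask so that almost every call costs just an increment and a bitwise AND, while the (comparatively expensive) cancellation check fires only once per 8192 calls.

    import java.util.concurrent.atomic.AtomicBoolean;

    public class SampledCancellationCheckDemo {

        // Same shape as the constant in the diff below: a power-of-two-minus-one mask.
        private static final int CHECK_MASK = (1 << 13) - 1; // 8191

        private int calls;
        private final AtomicBoolean cancelled = new AtomicBoolean(false);

        // Cheap on almost every call: one increment and one bitwise AND.
        // The cancellation check itself runs only when the low 13 bits of the
        // counter are zero, i.e. once per 8192 calls.
        void visitOneDoc() {
            if ((calls++ & CHECK_MASK) == 0 && cancelled.get()) {
                throw new RuntimeException("query was cancelled");
            }
            // ... per-document work would happen here ...
        }

        public static void main(String[] args) {
            SampledCancellationCheckDemo demo = new SampledCancellationCheckDemo();
            for (int i = 0; i < 100_000; i++) {
                demo.visitOneDoc();
            }
            System.out.println("visited 100000 docs with " + (100_000 / 8192 + 1) + " cancellation checks");
        }
    }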

Follows: #52822
Follows: #53166
Follows: #53496

(cherry picked from commit cdc377e8e74d3ca6c231c36dc5e80621aab47c69)
Authored by Marios Trivyzas on 2020-03-23 21:30:34 +01:00, committed by GitHub
commit 3a3e964956, parent 286c3660bd
13 changed files with 64 additions and 66 deletions


@@ -101,6 +101,7 @@ final class DefaultSearchContext extends SearchContext {
     private final QuerySearchResult queryResult;
     private final FetchSearchResult fetchResult;
     private final float queryBoost;
+    private final boolean lowLevelCancellation;
     private TimeValue timeout;
     // terminate after count
     private int terminateAfter = DEFAULT_TERMINATE_AFTER;
@@ -121,7 +122,6 @@ final class DefaultSearchContext extends SearchContext {
     private int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO;
     private FieldDoc searchAfter;
     private CollapseContext collapse;
-    private boolean lowLevelCancellation;
     // filter for sliced scroll
     private SliceBuilder sliceBuilder;
     private SearchShardTask task;
@@ -160,7 +160,7 @@ final class DefaultSearchContext extends SearchContext {
     DefaultSearchContext(SearchContextId id, ShardSearchRequest request, SearchShardTarget shardTarget,
                          Engine.Searcher engineSearcher, ClusterService clusterService, IndexService indexService,
                          IndexShard indexShard, BigArrays bigArrays, LongSupplier relativeTimeSupplier, TimeValue timeout,
-                         FetchPhase fetchPhase, Version minNodeVersion) throws IOException {
+                         FetchPhase fetchPhase, boolean lowLevelCancellation, Version minNodeVersion) throws IOException {
         this.id = id;
         this.request = request;
         this.fetchPhase = fetchPhase;
@@ -176,7 +176,7 @@ final class DefaultSearchContext extends SearchContext {
         this.indexService = indexService;
         this.clusterService = clusterService;
         this.searcher = new ContextIndexSearcher(engineSearcher.getIndexReader(), engineSearcher.getSimilarity(),
-            engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy());
+            engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), lowLevelCancellation);
         this.relativeTimeSupplier = relativeTimeSupplier;
         this.timeout = timeout;
         this.minNodeVersion = minNodeVersion;
@@ -184,6 +184,7 @@ final class DefaultSearchContext extends SearchContext {
             request::nowInMillis, shardTarget.getClusterAlias());
         queryShardContext.setTypes(request.types());
         queryBoost = request.indexBoost();
+        this.lowLevelCancellation = lowLevelCancellation;
     }

     @Override
@@ -585,10 +586,6 @@ final class DefaultSearchContext extends SearchContext {
         return lowLevelCancellation;
     }

-    public void lowLevelCancellation(boolean lowLevelCancellation) {
-        this.lowLevelCancellation = lowLevelCancellation;
-    }
-
     @Override
     public FieldDoc searchAfter() {
         return searchAfter;


@@ -339,10 +339,9 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     }

     private DfsSearchResult executeDfsPhase(SearchRewriteContext rewriteContext, SearchShardTask task) throws IOException {
-        final SearchContext context = createAndPutContext(rewriteContext);
+        final SearchContext context = createAndPutContext(rewriteContext, task);
         context.incRef();
         try {
-            context.setTask(task);
             contextProcessing(context);
             dfsPhase.execute(context);
             contextProcessedSuccessfully(context);
@@ -422,11 +421,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
     }

     private SearchPhaseResult executeQueryPhase(SearchRewriteContext rewriteContext, SearchShardTask task) throws Exception {
-        final SearchContext context = createAndPutContext(rewriteContext);
+        final SearchContext context = createAndPutContext(rewriteContext, task);
         final ShardSearchRequest request = rewriteContext.request;
         context.incRef();
         try {
-            context.setTask(task);
             final long afterQueryTime;
             try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) {
                 contextProcessing(context);
@@ -626,8 +624,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         }
     }

-    final SearchContext createAndPutContext(SearchRewriteContext rewriteContext) throws IOException {
-        SearchContext context = createContext(rewriteContext);
+    final SearchContext createAndPutContext(SearchRewriteContext rewriteContext, SearchShardTask task) throws IOException {
+        SearchContext context = createContext(rewriteContext, task);
         onNewContext(context);
         boolean success = false;
         try {
@@ -660,7 +658,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         }
     }

-    final SearchContext createContext(SearchRewriteContext rewriteContext) throws IOException {
+    final SearchContext createContext(SearchRewriteContext rewriteContext, SearchShardTask searchTask) throws IOException {
         final DefaultSearchContext context = createSearchContext(rewriteContext, defaultSearchTimeout);
         try {
             final ShardSearchRequest request = rewriteContext.request;
@@ -684,6 +682,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
             if (context.size() == -1) {
                 context.size(DEFAULT_SIZE);
             }
+            context.setTask(searchTask);

             // pre process
             dfsPhase.preProcess(context);
@@ -696,7 +695,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
                 keepAlive = request.scroll().keepAlive().millis();
             }
             contextScrollKeepAlive(context, keepAlive);
-            context.lowLevelCancellation(lowLevelCancellation);
         } catch (Exception e) {
             context.close();
             throw e;
@@ -731,7 +729,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
             final SearchContextId searchContextId = new SearchContextId(readerId, idGenerator.incrementAndGet());
             DefaultSearchContext searchContext = new DefaultSearchContext(searchContextId, request, shardTarget,
                 searcher, clusterService, indexService, indexShard, bigArrays, threadPool::relativeTimeInMillis, timeout,
-                fetchPhase, clusterService.state().nodes().getMinNodeVersion());
+                fetchPhase, lowLevelCancellation, clusterService.state().nodes().getMinNodeVersion());
             success = true;
             return searchContext;
         } finally {


@@ -82,23 +82,20 @@ public class ContextIndexSearcher extends IndexSearcher {
     private MutableQueryTimeout cancellable;

     public ContextIndexSearcher(IndexReader reader, Similarity similarity,
-                                QueryCache queryCache, QueryCachingPolicy queryCachingPolicy) throws IOException {
-        this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout());
+                                QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
+                                boolean wrapWithExitableDirectoryReader) throws IOException {
+        this(reader, similarity, queryCache, queryCachingPolicy, new MutableQueryTimeout(), wrapWithExitableDirectoryReader);
     }

-    // TODO: Make the 2nd constructor private so that the IndexReader is always wrapped.
-    // Some issues must be fixed:
-    //  - regarding tests deriving from AggregatorTestCase and more specifically the use of searchAndReduce and
-    //    the ShardSearcher sub-searchers.
-    //  - tests that use a MultiReader
-    public ContextIndexSearcher(IndexReader reader, Similarity similarity,
+    private ContextIndexSearcher(IndexReader reader, Similarity similarity,
                                 QueryCache queryCache, QueryCachingPolicy queryCachingPolicy,
-                                MutableQueryTimeout cancellable) throws IOException {
-        super(cancellable != null ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader);
+                                MutableQueryTimeout cancellable,
+                                boolean wrapWithExitableDirectoryReader) throws IOException {
+        super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader);
         setSimilarity(similarity);
         setQueryCache(queryCache);
         setQueryCachingPolicy(queryCachingPolicy);
-        this.cancellable = cancellable != null ? cancellable : new MutableQueryTimeout();
+        this.cancellable = cancellable;
     }

     public void setProfiler(QueryProfiler profiler) {


@@ -245,7 +245,7 @@ class ExitableDirectoryReader extends FilterDirectoryReader {
     private static class ExitableIntersectVisitor implements PointValues.IntersectVisitor {

-        private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = (1 << 10) - 1; // 1023
+        private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = (1 << 13) - 1; // 8191

         private final PointValues.IntersectVisitor in;
         private final QueryCancellation queryCancellation;


@@ -112,9 +112,9 @@ public class QueryPhase implements SearchPhase {
     public void preProcess(SearchContext context) {
         final Runnable cancellation;
         if (context.lowLevelCancellation()) {
-            SearchShardTask task = context.getTask();
             cancellation = context.searcher().addQueryCancellation(() -> {
-                if (task.isCancelled()) {
+                SearchShardTask task = context.getTask();
+                if (task != null && task.isCancelled()) {
                     throw new TaskCancelledException("cancelled");
                 }
             });
@@ -282,9 +282,9 @@ public class QueryPhase implements SearchPhase {
         }

         if (searchContext.lowLevelCancellation()) {
-            SearchShardTask task = searchContext.getTask();
             searcher.addQueryCancellation(() -> {
-                if (task.isCancelled()) {
+                SearchShardTask task = searchContext.getTask();
+                if (task != null && task.isCancelled()) {
                     throw new TaskCancelledException("cancelled");
                 }
             });
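
The QueryPhase change above reads the task inside the cancellation runnable instead of capturing it when preProcess() runs, and it tolerates a null task (the tests below create contexts without one). A rough sketch of why that ordering matters follows; TaskHolder and the runnable registry are hypothetical stand-ins, not the Elasticsearch API.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicReference;

    public class LazyTaskLookupDemo {

        // Stand-in for the search context: the task may be attached only after
        // the cancellation runnable has already been registered.
        static class TaskHolder {
            private final AtomicReference<String> task = new AtomicReference<>();
            void setTask(String t) { task.set(t); }
            String getTask() { return task.get(); }
        }

        public static void main(String[] args) {
            TaskHolder context = new TaskHolder();
            List<Runnable> cancellationChecks = new ArrayList<>();

            // Registered before the task exists, so the task has to be resolved
            // lazily and a null task must be tolerated.
            cancellationChecks.add(() -> {
                String task = context.getTask();
                if (task != null && task.contains("cancelled")) {
                    throw new RuntimeException("cancelled");
                }
            });

            cancellationChecks.forEach(Runnable::run); // no task yet: nothing happens
            context.setTask("task-1 (cancelled)");
            try {
                cancellationChecks.forEach(Runnable::run); // now the check fires
            } catch (RuntimeException e) {
                System.out.println("cancellation observed: " + e.getMessage());
            }
        }
    }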


@@ -122,7 +122,8 @@ public class DefaultSearchContextTests extends ESTestCase {
         SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);

         DefaultSearchContext context1 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 1L),
-            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
+            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
+            false, Version.CURRENT);
         context1.from(300);

         // resultWindow greater than maxResultWindow and scrollContext is null
@@ -163,7 +164,8 @@ public class DefaultSearchContextTests extends ESTestCase {
         // rescore is null but sliceBuilder is not null
         DefaultSearchContext context2 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 2L),
-            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
+            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
+            false, Version.CURRENT);

         SliceBuilder sliceBuilder = mock(SliceBuilder.class);
         int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100);
@@ -180,7 +182,8 @@ public class DefaultSearchContextTests extends ESTestCase {
         when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST);

         DefaultSearchContext context3 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 3L),
-            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
+            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
+            false, Version.CURRENT);
         ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery();
         context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false);
         assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query()));
@@ -190,7 +193,8 @@ public class DefaultSearchContextTests extends ESTestCase {
         when(shardSearchRequest.indexRoutings()).thenReturn(new String[0]);

         DefaultSearchContext context4 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 4L),
-            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, Version.CURRENT);
+            shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null,
+            false, Version.CURRENT);
         context4.sliceBuilder(new SliceBuilder(1,2)).parsedQuery(parsedQuery).preProcess(false);
         Query query1 = context4.query();
         context4.sliceBuilder(new SliceBuilder(0,2)).parsedQuery(parsedQuery).preProcess(false);


@@ -90,8 +90,8 @@ public class SearchCancellationTests extends ESTestCase {
     }

     public void testAddingCancellationActions() throws IOException {
-        ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
-            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+        ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
         NullPointerException npe = expectThrows(NullPointerException.class, () -> searcher.addQueryCancellation(null));
         assertEquals("cancellation runnable should not be null", npe.getMessage());
@@ -104,8 +104,8 @@ public class SearchCancellationTests extends ESTestCase {
     public void testCancellableCollector() throws IOException {
         TotalHitCountCollector collector1 = new TotalHitCountCollector();
         Runnable cancellation = () -> { throw new TaskCancelledException("cancelled"); };
-        ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
-            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+        ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
         searcher.search(new MatchAllDocsQuery(), collector1);
         assertThat(collector1.getTotalHits(), equalTo(reader.numDocs()));
@@ -120,14 +120,14 @@ public class SearchCancellationTests extends ESTestCase {
         assertThat(collector2.getTotalHits(), equalTo(reader.numDocs()));
     }

-    public void testCancellableDirectoryReader() throws IOException {
+    public void testExitableDirectoryReader() throws IOException {
         AtomicBoolean cancelled = new AtomicBoolean(true);
         Runnable cancellation = () -> {
             if (cancelled.get()) {
                 throw new TaskCancelledException("cancelled");
             }};
-        ContextIndexSearcher searcher = new ContextIndexSearcher(reader,
-            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+        ContextIndexSearcher searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
         searcher.addQueryCancellation(cancellation);

         CompiledAutomaton automaton = new CompiledAutomaton(new RegExp("a.*").toAutomaton());


@@ -368,7 +368,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
                     new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
                 indexShard);
-        final SearchContext contextWithDefaultTimeout = service.createContext(rewriteContext);
+        final SearchContext contextWithDefaultTimeout = service.createContext(rewriteContext, null);
         try {
             // the search context should inherit the default timeout
             assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
@@ -383,7 +383,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
                     new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
                 indexShard);
-        final SearchContext context = service.createContext(rewriteContext);
+        final SearchContext context = service.createContext(rewriteContext, null);
         try {
             // the search context should inherit the query timeout
             assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));
@@ -417,7 +417,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {

         {
             SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
-            try (SearchContext context = service.createContext(rewriteContext)) {
+            try (SearchContext context = service.createContext(rewriteContext, null)) {
                 assertNotNull(context);
             }
         }
@@ -426,7 +426,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
             searchSourceBuilder.docValueField("one_field_too_much");
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-                () -> service.createContext(rewriteContext));
+                () -> service.createContext(rewriteContext, null));
             assertEquals(
                 "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. "
                     + "This limit can be set by changing the [index.max_docvalue_fields_search] index level setting.", ex.getMessage());
@@ -458,7 +458,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {

         {
             SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
-            try (SearchContext context = service.createContext(rewriteContext)) {
+            try (SearchContext context = service.createContext(rewriteContext, null)) {
                 assertNotNull(context);
             }
         }
@@ -468,7 +468,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
             SearchService.SearchRewriteContext rewriteContext = service.acquireSearcherAndRewrite(shardRequest, indexShard);
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-                () -> service.createContext(rewriteContext));
+                () -> service.createContext(rewriteContext, null));
             assertEquals(
                 "Trying to retrieve too many script_fields. Must be less than or equal to: [" + maxScriptFields + "] but was ["
                     + (maxScriptFields + 1)
@@ -494,7 +494,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
                     new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
                 indexShard);
-            try (SearchContext context = service.createContext(rewriteContext)) {
+            try (SearchContext context = service.createContext(rewriteContext, null)) {
                 assertEquals(0, context.scriptFields().fields().size());
             }
         }
@@ -531,7 +531,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             SearchService.SearchRewriteContext rewriteContext =
                 service.acquireSearcherAndRewrite(new ShardScrollRequestTest(indexShard.shardId()), indexShard);
             ElasticsearchException ex = expectThrows(ElasticsearchException.class,
-                () -> service.createAndPutContext(rewriteContext));
+                () -> service.createAndPutContext(rewriteContext, null));
             assertEquals(
                 "Trying to create too many scroll contexts. Must be less than or equal to: [" +
                     SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY) + "]. " +
@@ -557,7 +557,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 SearchService.SearchRewriteContext rewriteContext =
                     searchService.acquireSearcherAndRewrite(new ShardScrollRequestTest(indexShard.shardId()), indexShard);
                 try {
-                    searchService.createAndPutContext(rewriteContext);
+                    searchService.createAndPutContext(rewriteContext, null);
                 } catch (ElasticsearchException e) {
                     assertThat(e.getMessage(), equalTo(
                         "Trying to create too many scroll contexts. Must be less than or equal to: " +
@@ -835,7 +835,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             }
         }, indexShard);
         NullPointerException e = expectThrows(NullPointerException.class,
-            () -> service.createContext(rewriteContext));
+            () -> service.createContext(rewriteContext, null));
         assertEquals("expected", e.getMessage());
         assertEquals("should have 2 store refs (IndexService + InternalEngine)", 2, indexService.getShard(0).store().refCount());
     }
@@ -988,7 +988,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             int numContexts = randomIntBetween(1, 10);
             for (int i = 0; i < numContexts; i++) {
                 SearchService.SearchRewriteContext rewriteContext = searchService.acquireSearcherAndRewrite(shardSearchRequest, indexShard);
-                final SearchContext searchContext = searchService.createContext(rewriteContext);
+                final SearchContext searchContext = searchService.createContext(rewriteContext, null);
                 assertThat(searchContext.id().getId(), equalTo((long) (i + 1)));
                 searchService.putContext(searchContext);
                 contextIds.add(searchContext.id());


@@ -240,7 +240,7 @@ public class ContextIndexSearcherTests extends ESTestCase {
         DocumentSubsetDirectoryReader filteredReader = new DocumentSubsetDirectoryReader(reader, cache, roleQuery);

         ContextIndexSearcher searcher = new ContextIndexSearcher(filteredReader, IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);

         // Assert wrapping
         assertEquals(ExitableDirectoryReader.class, searcher.getIndexReader().getClass());


@@ -43,8 +43,8 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.lucene.util.TestUtil;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.test.ESTestCase;
@@ -83,7 +83,7 @@ public class QueryProfilerTests extends ESTestCase {
         reader = w.getReader();
         w.close();
         searcher = new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(), ALWAYS_CACHE_POLICY);
+            IndexSearcher.getDefaultQueryCache(), ALWAYS_CACHE_POLICY, true);
     }

     @After


@@ -925,12 +925,12 @@ public class QueryPhaseTests extends IndexShardTestCase {
     private static ContextIndexSearcher newContextSearcher(IndexReader reader) throws IOException {
         return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true);
     }

     private static ContextIndexSearcher newEarlyTerminationContextSearcher(IndexReader reader, int size) throws IOException {
         return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()) {
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true) {

             @Override
             public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
@@ -943,7 +943,7 @@ public class QueryPhaseTests extends IndexShardTestCase {
     // used to check that numeric long or date sort optimization was run
     private static ContextIndexSearcher newOptimizedContextSearcher(IndexReader reader, int queryType) throws IOException {
         return new ContextIndexSearcher(reader, IndexSearcher.getDefaultSimilarity(),
-            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy()) {
+            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true) {

             @Override
             public void search(List<LeafReaderContext> leaves, Weight weight, CollectorManager manager,


@@ -204,7 +204,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
             }
         };
         ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(indexSearcher.getIndexReader(),
-            indexSearcher.getSimilarity(), queryCache, queryCachingPolicy, null);
+            indexSearcher.getSimilarity(), queryCache, queryCachingPolicy, false);

         SearchContext searchContext = mock(SearchContext.class);
         when(searchContext.numberOfShards()).thenReturn(1);


@@ -161,8 +161,9 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
             when(queryShardContext.toQuery(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
             DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
-            IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
-                IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+            IndexSearcher indexSearcher = new ContextIndexSearcher(
+                wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(),
+                IndexSearcher.getDefaultQueryCachingPolicy(), true);

             int expectedHitCount = valuesHitCount[i];
             logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount);
@@ -270,8 +271,9 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
         DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), shardId);
         DirectoryReader wrappedDirectoryReader = wrapper.apply(directoryReader);
-        IndexSearcher indexSearcher = new ContextIndexSearcher(wrappedDirectoryReader,
-            IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy());
+        IndexSearcher indexSearcher = new ContextIndexSearcher(
+            wrappedDirectoryReader, IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(),
+            IndexSearcher.getDefaultQueryCachingPolicy(), true);

         ScoreDoc[] hits = indexSearcher.search(new MatchAllDocsQuery(), 1000).scoreDocs;
         Set<Integer> actualDocIds = new HashSet<>();