Remove SearchPhase interface (#62050)

The interface is never used as an abstraction - implementations are called directly,
and most of them don't need to implement the preProcess method.
Alan Woodward 2020-09-07 13:43:53 +01:00 committed by Alan Woodward
parent 3389d5ccb2
commit cbc9578cbd
9 changed files with 59 additions and 106 deletions
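
In practice the change boils down to the following shape, shown here as a minimal sketch with assumed names and elided bodies rather than the actual Elasticsearch classes: each phase is a standalone class whose methods callers invoke directly, and only the phases that genuinely need a pre-processing step expose one.

class SearchContext { /* placeholder standing in for org.elasticsearch.search.internal.SearchContext */ }

class QueryPhase {
    void preProcess(SearchContext context) { /* set up cancellation checks, etc. */ }
    void execute(SearchContext context) { /* run the query */ }
}

class FetchPhase {
    // This phase never needed a pre-processing step, so it simply does not declare one.
    void execute(SearchContext context) { /* load the top matching documents */ }
}

class SearchService {
    private final QueryPhase queryPhase = new QueryPhase();
    private final FetchPhase fetchPhase = new FetchPhase();

    void executeQueryPhase(SearchContext context) {
        // Callers hold the concrete classes directly; no SearchPhase interface is
        // needed because the phases are never handled polymorphically.
        queryPhase.preProcess(context);
        queryPhase.execute(context);
    }

    void executeFetchPhase(SearchContext context) {
        fetchPhase.execute(context);
    }
}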

View File

@@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;

import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.tasks.Task;

public class SearchContextSourcePrinter {

    private final SearchContext searchContext;

    public SearchContextSourcePrinter(SearchContext searchContext) {
        this.searchContext = searchContext;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append(searchContext.indexShard().shardId());
        builder.append(" ");
        if (searchContext.request() != null &&
                searchContext.request().source() != null) {
            builder.append("source[").append(searchContext.request().source().toString()).append("], ");
        } else {
            builder.append("source[], ");
        }
        if (searchContext.getTask() != null &&
                searchContext.getTask().getHeader(Task.X_OPAQUE_ID) != null) {
            builder.append("id[").append(searchContext.getTask().getHeader(Task.X_OPAQUE_ID)).append("], ");
        } else {
            builder.append("id[], ");
        }
        return builder.toString();
    }
}

View File

@@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;

import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.tasks.Task;

/**
 * Represents a phase of a search request e.g. query, fetch etc.
 */
public interface SearchPhase {

    /**
     * Performs pre processing of the search context before the execute.
     */
    void preProcess(SearchContext context);

    /**
     * Executes the search phase
     */
    void execute(SearchContext context);

    class SearchContextSourcePrinter {
        private final SearchContext searchContext;

        public SearchContextSourcePrinter(SearchContext searchContext) {
            this.searchContext = searchContext;
        }

        @Override
        public String toString() {
            StringBuilder builder = new StringBuilder();
            builder.append(searchContext.indexShard().shardId());
            builder.append(" ");
            if (searchContext.request() != null &&
                    searchContext.request().source() != null) {
                builder.append("source[").append(searchContext.request().source().toString()).append("], ");
            } else {
                builder.append("source[], ");
            }
            if (searchContext.getTask() != null &&
                    searchContext.getTask().getHeader(Task.X_OPAQUE_ID) != null) {
                builder.append("id[").append(searchContext.getTask().getHeader(Task.X_OPAQUE_ID)).append("], ");
            } else {
                builder.append("id[], ");
            }
            return builder.toString();
        }
    }
}

View File

@@ -699,9 +699,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
context.setTask(searchTask);
// pre process
dfsPhase.preProcess(context);
queryPhase.preProcess(context);
fetchPhase.preProcess(context);
// compute the context keep alive
long keepAlive = defaultKeepAlive;

View File

@@ -22,7 +22,6 @@ import org.apache.lucene.search.Collector
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.profile.query.CollectorResult;
@@ -37,13 +36,12 @@ import java.util.List;
/**
* Aggregation phase of a search request, used to collect aggregations
*/
public class AggregationPhase implements SearchPhase {
public class AggregationPhase {
@Inject
public AggregationPhase() {
}
@Override
public void preProcess(SearchContext context) {
if (context.aggregations() != null) {
List<Aggregator> collectors = new ArrayList<>();
@@ -73,7 +71,6 @@ public class AggregationPhase implements SearchPhase {
}
}
@Override
public void execute(SearchContext context) {
if (context.aggregations() == null) {
context.queryResult().aggregations(null);

View File

@@ -27,7 +27,6 @@ import org.apache.lucene.search.Query
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.TermStatistics;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.tasks.TaskCancelledException;
@@ -40,13 +39,8 @@ import java.util.Map;
* Dfs phase of a search request, used to make scoring 100% accurate by collecting additional info from each shard before the query phase.
* The additional information is used to better compare the scores coming from all the shards, which depend on local factors (e.g. idf)
*/
public class DfsPhase implements SearchPhase {
public class DfsPhase {
@Override
public void preProcess(SearchContext context) {
}
@Override
public void execute(SearchContext context) {
try {
ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
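
The class comment above is the reason the DFS phase exists at all: each shard's local term statistics skew scores, so the per-shard statistics are combined before the query phase runs. A rough sketch of the idea, with assumed names and a generic idf formula rather than the real Lucene/Elasticsearch internals:

import java.util.List;

class GlobalTermStatsSketch {
    // stats[0] = docCount of the shard, stats[1] = docFreq of the term on that shard.
    static double globalIdf(List<long[]> perShardStats) {
        long totalDocs = 0;
        long docFreq = 0;
        for (long[] stats : perShardStats) {
            totalDocs += stats[0];
            docFreq += stats[1];
        }
        // BM25-style idf over the combined counts; the similarity actually configured
        // on the index may use a different formula.
        return Math.log(1 + (totalDocs - docFreq + 0.5) / (docFreq + 0.5));
    }
}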

View File

@@ -49,9 +49,9 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchContextSourcePrinter;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@@ -78,7 +78,7 @@ import static java.util.Collections.emptyMap;
* Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified
* after reducing all of the matches returned by the query phase
*/
public class FetchPhase implements SearchPhase {
public class FetchPhase {
private static final Logger LOGGER = LogManager.getLogger(FetchPhase.class);
private final FetchSubPhase[] fetchSubPhases;
@@ -88,11 +88,6 @@ public class FetchPhase implements SearchPhase {
this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsPhase(this);
}
@Override
public void preProcess(SearchContext context) {
}
@Override
public void execute(SearchContext context) {
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("{}", new SearchContextSourcePrinter(context));

View File

@@ -56,7 +56,7 @@ import org.elasticsearch.index.IndexSortConfig;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.SearchContextSourcePrinter;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.aggregations.AggregationPhase;
import org.elasticsearch.search.internal.ContextIndexSearcher;
@@ -92,7 +92,7 @@ import static org.elasticsearch.search.query.TopDocsCollectorContext.shortcutTot
* Query phase of a search request, used to run the query and get back from each shard information about the matching documents
* (document ids and score or sort criteria) so that matches can be reduced on the coordinating node
*/
public class QueryPhase implements SearchPhase {
public class QueryPhase {
private static final Logger LOGGER = LogManager.getLogger(QueryPhase.class);
// TODO: remove this property
public static final boolean SYS_PROP_REWRITE_SORT = Booleans.parseBoolean(System.getProperty("es.search.rewrite_sort", "true"));
@@ -107,7 +107,6 @@ public class QueryPhase implements SearchPhase {
this.rescorePhase = new RescorePhase();
}
@Override
public void preProcess(SearchContext context) {
final Runnable cancellation;
if (context.lowLevelCancellation()) {
@@ -129,7 +128,6 @@
}
}
@Override
public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
if (searchContext.hasOnlySuggest()) {
suggestPhase.execute(searchContext);
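
The query phase and fetch phase class comments describe the two halves of query-then-fetch: shards first return only doc ids plus scores or sort values, the coordinating node reduces them to a global top-N, and only those winners have their document source fetched. A conceptual sketch of that reduce step, using assumed types rather than the actual coordinator code:

import java.util.ArrayList;
import java.util.List;

class QueryThenFetchSketch {
    // A query-phase hit: enough to rank globally, but not the document source itself.
    record ShardDoc(int shardId, int docId, float score) {}

    // Reduce the per-shard top hits into the global top-N; the fetch phase then
    // retrieves the source only for these documents from their owning shards.
    static List<ShardDoc> reduce(List<List<ShardDoc>> perShardTopDocs, int size) {
        List<ShardDoc> all = new ArrayList<>();
        perShardTopDocs.forEach(all::addAll);
        all.sort((a, b) -> Float.compare(b.score(), a.score()));
        return all.subList(0, Math.min(size, all.size()));
    }
}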

View File

@@ -23,7 +23,6 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -31,12 +30,8 @@ import java.io.IOException;
/**
* Rescore phase of a search request, used to run potentially expensive scoring models against the top matching documents.
*/
public class RescorePhase implements SearchPhase {
@Override
public void preProcess(SearchContext context) {
}
public class RescorePhase {
@Override
public void execute(SearchContext context) {
TopDocs topDocs = context.queryResult().topDocs().topDocs;
if (topDocs.scoreDocs.length == 0) {

View File

@@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
@@ -35,12 +34,8 @@ import java.util.Map;
/**
* Suggest phase of a search request, used to collect suggestions
*/
public class SuggestPhase implements SearchPhase {
@Override
public void preProcess(SearchContext context) {
}
public class SuggestPhase {
@Override
public void execute(SearchContext context) {
final SuggestionSearchContext suggest = context.suggest();
if (suggest == null) {
@@ -66,8 +61,5 @@ public class SuggestPhase implements SearchPhase {
}
}
static class SortedHits {
int[] docs;
}
}