Merge pull request #20382 from javanna/enhancement/cleanup_parse_elements

Cleanup sub fetch phase extension point
Luca Cavanna 2016-09-09 22:47:15 +02:00 committed by GitHub
commit 4b00cc37a1
41 changed files with 473 additions and 459 deletions

View File

@ -238,7 +238,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]IncompatibleClusterStateVersionException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]InternalClusterInfoService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]LocalNodeMasterListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeIndexDeletedAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeMappingRefreshAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]shard[/\\]ShardStateAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]block[/\\]ClusterBlock.java" checks="LineLength" />
@ -415,7 +414,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RootObjectMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]AbstractQueryBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MatchQueryParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]InnerHitsQueryParserHelper.java" checks="LineLength" />
@ -488,15 +486,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]AbstractScriptParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModes.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptParameterParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]Template.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueMode.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]SearchService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]AggregatorFactories.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]InternalMultiBucketAggregation.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]ValuesSourceAggregationBuilder.java" checks="LineLength" />
@ -555,29 +550,19 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValuesSourceParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueFormat.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]builder[/\\]SearchSourceBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]controller[/\\]SearchPhaseController.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]DfsSearchResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSearchResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhaseParseElement.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]DefaultSearchContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]FilteredSearchContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]InternalSearchHit.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]SearchContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]ShardSearchTransportRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafFieldsLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]RescoreParseElement.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]ScriptSortParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]SortParseElement.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestContextParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />
@ -586,9 +571,7 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoQueryContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]CandidateScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardFailure.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardsService.java" checks="LineLength" />
@ -952,7 +935,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ChildQuerySearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhasePluginIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoBoundingBoxIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoFilterIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoShapeQueryTests.java" checks="LineLength" />
@ -1007,7 +989,6 @@
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolateRequestBuilder.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolateShardResponse.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportMultiPercolateAction.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportPercolateAction.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportShardMultiPercolateAction.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolatorIT.java" checks="LineLength" />
<suppress files="modules[/\\]percolator[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorIT.java" checks="LineLength" />
@ -1084,7 +1065,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]IndexSettingsModule.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]InternalTestCluster.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]MockIndexEventListener.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]TestSearchContext.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]cluster[/\\]NoopClusterService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]cluster[/\\]TestClusterService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]discovery[/\\]ClusterDiscoveryConfiguration.java" checks="LineLength" />

View File

@ -25,10 +25,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.elasticsearch.plugins.Plugin;
/**
* A registry for {@link org.elasticsearch.common.io.stream.Writeable.Reader} readers of {@link NamedWriteable}.
@ -47,7 +43,7 @@ public class NamedWriteableRegistry {
/** A name for the writeable which is unique to the {@link #categoryClass}. */
public final String name;
/** A reader captability of reading*/
/** A reader capability of reading*/
public final Writeable.Reader<?> reader;
/** Creates a new entry which can be stored by the registry. */

View File

@ -871,6 +871,16 @@ public abstract class StreamOutput extends OutputStream {
}
}
/**
* Writes a list of strings
*/
public void writeStringList(List<String> list) throws IOException {
writeVInt(list.size());
for (String string: list) {
this.writeString(string);
}
}
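For reference, a minimal sketch of how the new helper pairs with the read side already used elsewhere in this commit (SearchSourceBuilder below reads the same data back with StreamInput#readList). The BytesStreamOutput round trip is purely illustrative and not part of the change.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class WriteStringListRoundTrip {
    public static void main(String[] args) throws IOException {
        List<String> stats = Arrays.asList("group1", "group2");
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeStringList(stats); // helper added by this commit
            try (StreamInput in = out.bytes().streamInput()) {
                // matching read side, as used for the stats list further down in this diff
                List<String> restored = in.readList(StreamInput::readString);
                assert restored.equals(stats);
            }
        }
    }
}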
/**
* Writes a list of {@link NamedWriteable} objects.
*/

View File

@ -36,7 +36,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
@ -572,12 +571,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
innerHitsContext.storedFieldsContext(storedFieldsContext);
}
if (docValueFields != null) {
DocValueFieldsContext docValueFieldsContext = innerHitsContext
.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : docValueFields) {
docValueFieldsContext.add(new DocValueFieldsContext.DocValueField(field));
}
docValueFieldsContext.setHitExecutionNeeded(true);
innerHitsContext.docValueFieldsContext(new DocValueFieldsContext(docValueFields));
}
if (scriptFields != null) {
for (ScriptField field : scriptFields) {

View File

@ -30,6 +30,8 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchExtParser;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
@ -82,6 +84,12 @@ public interface SearchPlugin {
default List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return emptyList();
}
/**
* The new {@link SearchExtParser}s defined by this plugin.
*/
default List<SearchExtSpec<?>> getSearchExts() {
return emptyList();
}
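As an illustration of the new extension point, a hedged sketch of a plugin overriding getSearchExts. The plugin class, the section name "my_section" and MyExtBuilder are hypothetical and only show the registration shape.

import java.util.Collections;
import java.util.List;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

public class MySearchPlugin extends Plugin implements SearchPlugin {
    @Override
    public List<SearchExtSpec<?>> getSearchExts() {
        // MyExtBuilder (an assumed SearchExtBuilder subclass) provides a StreamInput constructor
        // and a static fromXContent parser; "my_section" is the name requests use inside ext.
        return Collections.singletonList(
                new SearchExtSpec<>("my_section", MyExtBuilder::new, MyExtBuilder::fromXContent));
    }
}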
/**
* Get the {@link Highlighter}s defined by this plugin.
*/
@ -160,7 +168,7 @@ public interface SearchPlugin {
/**
* Specification for an {@link Aggregation}.
*/
public static class AggregationSpec extends SearchExtensionSpec<AggregationBuilder, Aggregator.Parser> {
class AggregationSpec extends SearchExtensionSpec<AggregationBuilder, Aggregator.Parser> {
private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();
/**
@ -217,7 +225,7 @@ public interface SearchPlugin {
/**
* Specification for a {@link PipelineAggregator}.
*/
public static class PipelineAggregationSpec extends SearchExtensionSpec<PipelineAggregationBuilder, PipelineAggregator.Parser> {
class PipelineAggregationSpec extends SearchExtensionSpec<PipelineAggregationBuilder, PipelineAggregator.Parser> {
private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();
private final Writeable.Reader<? extends PipelineAggregator> aggregatorReader;
@ -290,6 +298,19 @@ public interface SearchPlugin {
}
}
/**
* Specification for a {@link SearchExtBuilder} which represents an additional section that can be
* parsed in a search request (within the ext element).
*/
class SearchExtSpec<T extends SearchExtBuilder> extends SearchExtensionSpec<T, SearchExtParser<T>> {
public SearchExtSpec(ParseField name, Writeable.Reader<? extends T> reader, SearchExtParser<T> parser) {
super(name, reader, parser);
}
public SearchExtSpec(String name, Writeable.Reader<? extends T> reader, SearchExtParser<T> parser) {
super(name, reader, parser);
}
}
/**
* Specification of search time behavior extension like a custom {@link MovAvgModel} or {@link ScoreFunction}.

View File

@ -19,10 +19,6 @@
package org.elasticsearch.rest.action.search;
import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
@ -40,12 +36,16 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
@ -97,7 +97,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
final QueryParseContext queryParseContext = new QueryParseContext(searchRequestParsers.queryParsers,
requestParser, parseFieldMatcher);
searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext,
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers));
multiRequest.add(searchRequest);
} catch (IOException e) {
throw new ElasticsearchParseException("Exception when parsing search request", e);

View File

@ -102,7 +102,8 @@ public class RestSearchAction extends BaseRestHandler {
if (restContent != null) {
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
QueryParseContext context = new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher);
searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters,
searchRequestParsers.searchExtParsers);
}
}

View File

@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;
/**
* Intermediate serializable representation of a search ext section. To be subclassed by plugins that support
* a custom section as part of a search request, which will be provided within the ext element.
* Any state needs to be serialized as part of the {@link Writeable#writeTo(StreamOutput)} method and
* read from the incoming stream, usually done by adding a constructor that takes {@link StreamInput} as
* an argument.
*
* Registration happens through {@link SearchPlugin#getSearchExts()}, which also needs a {@link SearchExtParser} that's able to parse
* the incoming request from the REST layer into the proper {@link SearchExtBuilder} subclass.
*
* {@link #getWriteableName()} must return the same name as the one used for the registration
* of the {@link SearchExtSpec}.
*
* @see SearchExtParser
* @see SearchExtSpec
*/
public abstract class SearchExtBuilder implements NamedWriteable, ToXContent {
public abstract int hashCode();
public abstract boolean equals(Object obj);
}
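A minimal sketch of a concrete subclass honoring the contract described in the javadoc above: state serialized in writeTo, a StreamInput constructor, and a writeable name matching the registered spec. The class and the "my_section" name are hypothetical.

import java.io.IOException;
import java.util.Objects;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;

public class MyExtBuilder extends SearchExtBuilder {
    private final String field;

    public MyExtBuilder(String field) {
        this.field = field;
    }

    // Deserialization constructor, reading back exactly what writeTo wrote.
    public MyExtBuilder(StreamInput in) throws IOException {
        this.field = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(field);
    }

    @Override
    public String getWriteableName() {
        return "my_section"; // must match the name of the registered SearchExtSpec
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject("my_section").field("field", field).endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(field);
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof MyExtBuilder && Objects.equals(field, ((MyExtBuilder) obj).field);
    }
}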

View File

@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
* Defines a parser that is able to parse {@link org.elasticsearch.search.SearchExtBuilder}s
* from {@link org.elasticsearch.common.xcontent.XContent}.
*
* Registration happens through {@link org.elasticsearch.plugins.SearchPlugin#getSearchExts()}, which also needs a {@link SearchExtBuilder}
* implementation which is the object that this parser returns when reading an incoming request from the REST layer.
*
* @see SearchExtBuilder
* @see org.elasticsearch.plugins.SearchPlugin.SearchExtSpec
*/
@FunctionalInterface
public interface SearchExtParser<T extends SearchExtBuilder> {
/**
* Parses the supported element placed within the ext section of a search request
*/
T fromXContent(XContentParser parser) throws IOException;
}
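A parser for the hypothetical MyExtBuilder sketched above could be a static factory used as the method reference MyExtBuilder::fromXContent in the SearchExtSpec. This fragment assumes the section body is a single object such as {"field": "title"}.

// Fragment of the hypothetical MyExtBuilder: a static parser usable as a SearchExtParser.
public static MyExtBuilder fromXContent(XContentParser parser) throws IOException {
    String field = null;
    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue() && "field".equals(currentFieldName)) {
            field = parser.text();
        }
    }
    return new MyExtBuilder(field);
}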

View File

@ -19,13 +19,14 @@
package org.elasticsearch.search;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
/**
*
* Extensions to ParseFieldRegistry to make Guice happy.
*/
public interface SearchParseElement {
public class SearchExtRegistry extends ParseFieldRegistry<SearchExtParser> {
void parse(XContentParser parser, SearchContext context) throws Exception;
public SearchExtRegistry() {
super("ext");
}
}

View File

@ -93,6 +93,7 @@ import org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext;
import org.elasticsearch.plugins.SearchPlugin.PipelineAggregationSpec;
import org.elasticsearch.plugins.SearchPlugin.QuerySpec;
import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtensionSpec;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@ -306,6 +307,7 @@ public class SearchModule extends AbstractModule {
"moving_avg_model");
private final List<FetchSubPhase> fetchSubPhases = new ArrayList<>();
private final SearchExtRegistry searchExtParserRegistry = new SearchExtRegistry();
private final Settings settings;
private final List<Entry> namedWriteables = new ArrayList<>();
@ -326,8 +328,9 @@ public class SearchModule extends AbstractModule {
registerAggregations(plugins);
registerPipelineAggregations(plugins);
registerFetchSubPhases(plugins);
registerSearchExts(plugins);
registerShapes();
searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters());
searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters(), searchExtParserRegistry);
}
public List<Entry> getNamedWriteables() {
@ -380,6 +383,7 @@ public class SearchModule extends AbstractModule {
if (false == transportClient) {
bind(IndicesQueriesRegistry.class).toInstance(queryParserRegistry);
bind(SearchRequestParsers.class).toInstance(searchRequestParsers);
bind(SearchExtRegistry.class).toInstance(searchExtParserRegistry);
configureSearch();
}
}
@ -725,6 +729,15 @@ public class SearchModule extends AbstractModule {
registerFromPlugin(plugins, p -> p.getFetchSubPhases(context), this::registerFetchSubPhase);
}
private void registerSearchExts(List<SearchPlugin> plugins) {
registerFromPlugin(plugins, SearchPlugin::getSearchExts, this::registerSearchExt);
}
private void registerSearchExt(SearchExtSpec<?> spec) {
searchExtParserRegistry.register(spec.getParser(), spec.getName());
namedWriteables.add(new Entry(SearchExtBuilder.class, spec.getName().getPreferredName(), spec.getReader()));
}
private void registerFetchSubPhase(FetchSubPhase subPhase) {
Class<?> subPhaseClass = subPhase.getClass();
if (fetchSubPhases.stream().anyMatch(p -> p.getClass().equals(subPhaseClass))) {

View File

@ -21,22 +21,18 @@ package org.elasticsearch.search;
import org.elasticsearch.search.internal.SearchContext;
import java.util.Collections;
import java.util.Map;
/**
*
* Represents a phase of a search request e.g. query, fetch etc.
*/
public interface SearchPhase {
default Map<String, ? extends SearchParseElement> parseElements() {
return Collections.emptyMap();
}
/**
* Performs pre processing of the search context before the execute.
*/
void preProcess(SearchContext context);
/**
* Executes the search phase
*/
void execute(SearchContext context);
}

View File

@ -37,7 +37,8 @@ public class SearchRequestParsers {
/**
* Query parsers that may be used in search requests.
* @see org.elasticsearch.index.query.QueryParseContext
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final IndicesQueriesRegistry queryParsers;
@ -45,20 +46,29 @@ public class SearchRequestParsers {
// and pipeline agg parsers should be here
/**
* Agg and pipeline agg parsers that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final AggregatorParsers aggParsers;
// TODO: Suggesters should be removed and the underlying map moved here
/**
* Suggesters that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final Suggesters suggesters;
public SearchRequestParsers(IndicesQueriesRegistry queryParsers, AggregatorParsers aggParsers, Suggesters suggesters) {
/**
* Pluggable section that can be parsed out of a search section, within the ext element
*/
public final SearchExtRegistry searchExtParsers;
public SearchRequestParsers(IndicesQueriesRegistry queryParsers, AggregatorParsers aggParsers, Suggesters suggesters,
SearchExtRegistry searchExtParsers) {
this.queryParsers = queryParsers;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
this.searchExtParsers = searchExtParsers;
}
}

View File

@ -39,9 +39,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
@ -67,8 +64,6 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.DefaultSearchContext;
@ -102,7 +97,6 @@ import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
@ -145,8 +139,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
private final ConcurrentMapLong<SearchContext> activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
private final Map<String, SearchParseElement> elementParsers;
private final ParseFieldMatcher parseFieldMatcher;
@Inject
@ -165,12 +157,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings);
this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis();
Map<String, SearchParseElement> elementParsers = new HashMap<>();
elementParsers.putAll(dfsPhase.parseElements());
elementParsers.putAll(queryPhase.parseElements());
elementParsers.putAll(fetchPhase.parseElements());
this.elementParsers = unmodifiableMap(elementParsers);
this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval, Names.SAME);
defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);
@ -465,7 +451,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
throw ExceptionsHelper.convertToRuntime(e);
}
operationListener.onFetchPhase(context, System.nanoTime() - time2);
return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()),
context.shardTarget());
} catch (Exception e) {
logger.trace("Fetch phase failed", e);
processFailure(context, e);
@ -736,11 +723,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
context.fetchSourceContext(source.fetchSource());
}
if (source.docValueFields() != null) {
DocValueFieldsContext docValuesFieldsContext = context.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : source.docValueFields()) {
docValuesFieldsContext.add(new DocValueField(field));
}
docValuesFieldsContext.setHitExecutionNeeded(true);
context.docValueFieldsContext(new DocValueFieldsContext(source.docValueFields()));
}
if (source.highlighter() != null) {
HighlightBuilder highlightBuilder = source.highlighter();
@ -758,47 +741,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
}
if (source.ext() != null) {
XContentParser extParser = null;
try {
extParser = XContentFactory.xContent(source.ext()).createParser(source.ext());
if (extParser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new SearchParseException(context, "expected start object, found [" + extParser.currentToken() + "] instead",
extParser.getTokenLocation());
}
XContentParser.Token token;
String currentFieldName = null;
while ((token = extParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = extParser.currentName();
} else {
SearchParseElement parseElement = this.elementParsers.get(currentFieldName);
if (parseElement == null) {
if (currentFieldName != null && currentFieldName.equals("suggest")) {
throw new SearchParseException(context,
"suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead",
extParser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]",
extParser.getTokenLocation());
} else {
parseElement.parse(extParser, context);
}
}
}
} catch (Exception e) {
String sSource = "_na_";
try {
sSource = source.toString();
} catch (Exception inner) {
e.addSuppressed(inner);
// ignore
}
XContentLocation location = extParser != null ? extParser.getTokenLocation() : null;
throw new SearchParseException(context, "failed to parse ext source [" + sSource + "]", location, e);
} finally {
if (extParser != null) {
extParser.close();
}
for (SearchExtBuilder searchExtBuilder : source.ext()) {
context.addSearchExt(searchExtBuilder);
}
}
if (source.version() != null) {
@ -914,7 +858,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
continue;
}
if ((time - lastAccessTime > context.keepAlive())) {
logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time, lastAccessTime, context.keepAlive());
logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time,
lastAccessTime, context.keepAlive());
freeContext(context.id());
}
}
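The consumption side is now simple as well: instead of per-phase parse elements, a fetch sub phase can look up the pre-parsed builder that the ext handling above attached to the context. A hedged sketch, assuming the SearchContext exposes a matching accessor (shown here as getSearchExt(String), which is not part of this excerpt) and the hypothetical MyExtBuilder from the earlier sketches.

import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

public class MyFetchSubPhase implements FetchSubPhase {
    @Override
    public void hitExecute(SearchContext context, HitContext hitContext) {
        // Assumed accessor returning the builder stored via context.addSearchExt(...) above.
        MyExtBuilder ext = (MyExtBuilder) context.getSearchExt("my_section");
        if (ext == null) {
            return; // the request did not carry the custom ext section
        }
        // ... use ext together with hitContext.hit() to enrich the returned hit
    }
}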

View File

@ -40,7 +40,7 @@ import java.util.Collections;
import java.util.List;
/**
*
* Aggregation phase of a search request, used to collect aggregations
*/
public class AggregationPhase implements SearchPhase {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations.metrics.tophits;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.Aggregator;
@ -29,9 +28,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SubSearchContext;
@ -98,12 +96,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
subSearchContext.storedFieldsContext(storedFieldsContext);
}
if (docValueFields != null) {
DocValueFieldsContext docValueFieldsContext = subSearchContext
.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : docValueFields) {
docValueFieldsContext.add(new DocValueField(field));
}
docValueFieldsContext.setHitExecutionNeeded(true);
subSearchContext.docValueFieldsContext(new DocValueFieldsContext(docValueFields));
}
if (scriptFields != null) {
for (ScriptField field : scriptFields) {

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -33,13 +32,14 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchExtParser;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorParsers;
@ -108,9 +108,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
public static final ParseField SLICE = new ParseField("slice");
public static SearchSourceBuilder fromXContent(QueryParseContext context, AggregatorParsers aggParsers,
Suggesters suggesters) throws IOException {
Suggesters suggesters, SearchExtRegistry searchExtRegistry) throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.parseXContent(context, aggParsers, suggesters);
builder.parseXContent(context, aggParsers, suggesters, searchExtRegistry);
return builder;
}
@ -164,13 +164,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private SuggestBuilder suggestBuilder;
private List<RescoreBuilder<?>> rescoreBuilders;
private List<RescoreBuilder> rescoreBuilders;
private ObjectFloatHashMap<String> indexBoost = null;
private List<String> stats;
private BytesReference ext = null;
private List<SearchExtBuilder> extBuilders = Collections.emptyList();
private boolean profile = false;
@ -203,18 +203,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
if (in.readBoolean()) {
int size = in.readVInt();
rescoreBuilders = new ArrayList<>();
for (int i = 0; i < size; i++) {
rescoreBuilders.add(in.readNamedWriteable(RescoreBuilder.class));
}
rescoreBuilders = in.readNamedWriteableList(RescoreBuilder.class);
}
if (in.readBoolean()) {
int size = in.readVInt();
scriptFields = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
scriptFields.add(new ScriptField(in));
}
scriptFields = in.readList(ScriptField::new);
}
size = in.readVInt();
if (in.readBoolean()) {
@ -225,18 +217,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
}
if (in.readBoolean()) {
int size = in.readVInt();
stats = new ArrayList<>();
for (int i = 0; i < size; i++) {
stats.add(in.readString());
}
stats = in.readList(StreamInput::readString);
}
suggestBuilder = in.readOptionalWriteable(SuggestBuilder::new);
terminateAfter = in.readVInt();
timeout = in.readOptionalWriteable(TimeValue::new);
trackScores = in.readBoolean();
version = in.readOptionalBoolean();
ext = in.readOptionalBytesReference();
extBuilders = in.readNamedWriteableList(SearchExtBuilder.class);
profile = in.readBoolean();
searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new);
sliceBuilder = in.readOptionalWriteable(SliceBuilder::new);
@ -262,18 +250,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
boolean hasRescoreBuilders = rescoreBuilders != null;
out.writeBoolean(hasRescoreBuilders);
if (hasRescoreBuilders) {
out.writeVInt(rescoreBuilders.size());
for (RescoreBuilder<?> rescoreBuilder : rescoreBuilders) {
out.writeNamedWriteable(rescoreBuilder);
}
out.writeNamedWriteableList(rescoreBuilders);
}
boolean hasScriptFields = scriptFields != null;
out.writeBoolean(hasScriptFields);
if (hasScriptFields) {
out.writeVInt(scriptFields.size());
for (ScriptField scriptField : scriptFields) {
scriptField.writeTo(out);
}
out.writeList(scriptFields);
}
out.writeVInt(size);
boolean hasSorts = sorts != null;
@ -287,17 +269,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
boolean hasStats = stats != null;
out.writeBoolean(hasStats);
if (hasStats) {
out.writeVInt(stats.size());
for (String stat : stats) {
out.writeString(stat);
}
out.writeStringList(stats);
}
out.writeOptionalWriteable(suggestBuilder);
out.writeVInt(terminateAfter);
out.writeOptionalWriteable(timeout);
out.writeBoolean(trackScores);
out.writeOptionalBoolean(version);
out.writeOptionalBytesReference(ext);
out.writeNamedWriteableList(extBuilders);
out.writeBoolean(profile);
out.writeOptionalWriteable(searchAfterBuilder);
out.writeOptionalWriteable(sliceBuilder);
@ -649,7 +628,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/**
* Gets the bytes representing the rescore builders for this request.
*/
public List<RescoreBuilder<?>> rescores() {
public List<RescoreBuilder> rescores() {
return rescoreBuilders;
}
@ -875,13 +854,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
return stats;
}
public SearchSourceBuilder ext(XContentBuilder ext) {
this.ext = ext.bytes();
public SearchSourceBuilder ext(List<SearchExtBuilder> searchExtBuilders) {
this.extBuilders = Objects.requireNonNull(searchExtBuilders, "searchExtBuilders must not be null");
return this;
}
public BytesReference ext() {
return ext;
public List<SearchExtBuilder> ext() {
return extBuilders;
}
/**
@ -919,7 +898,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
SearchSourceBuilder rewrittenBuilder = new SearchSourceBuilder();
rewrittenBuilder.aggregations = aggregations;
rewrittenBuilder.explain = explain;
rewrittenBuilder.ext = ext;
rewrittenBuilder.extBuilders = extBuilders;
rewrittenBuilder.fetchSourceContext = fetchSourceContext;
rewrittenBuilder.docValueFields = docValueFields;
rewrittenBuilder.storedFieldsContext = storedFieldsContext;
@ -948,17 +927,18 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/**
* Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
* different defaults than a regular SearchSourceBuilder would have and use
* {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults.
* {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters, SearchExtRegistry)} if you have normal defaults.
*/
public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters)
public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers,
Suggesters suggesters, SearchExtRegistry searchExtRegistry)
throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
String currentFieldName = null;
if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] but found [" + token + "]",
parser.getTokenLocation());
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
"] but found [" + token + "]", parser.getTokenLocation());
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -1017,8 +997,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (token.isValue()) {
indexBoost.put(currentFieldName, parser.floatValue());
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation());
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token +
" in [" + currentFieldName + "].", parser.getTokenLocation());
}
}
} else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)
@ -1034,8 +1014,23 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
rescoreBuilders = new ArrayList<>();
rescoreBuilders.add(RescoreBuilder.parseFromXContent(context));
} else if (context.getParseFieldMatcher().match(currentFieldName, EXT_FIELD)) {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
ext = xContentBuilder.bytes();
extBuilders = new ArrayList<>();
String extSectionName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
extSectionName = parser.currentName();
} else {
SearchExtParser searchExtParser = searchExtRegistry.lookup(extSectionName,
context.getParseFieldMatcher(), parser.getTokenLocation());
SearchExtBuilder searchExtBuilder = searchExtParser.fromXContent(parser);
if (searchExtBuilder.getWriteableName().equals(extSectionName) == false) {
throw new IllegalStateException("The parsed [" + searchExtBuilder.getClass().getName() + "] object has a "
+ "different writeable name compared to the name of the section that it was parsed from: found ["
+ searchExtBuilder.getWriteableName() + "] expected [" + extSectionName + "]");
}
extBuilders.add(searchExtBuilder);
}
}
} else if (context.getParseFieldMatcher().match(currentFieldName, SLICE)) {
sliceBuilder = SliceBuilder.fromXContent(context);
} else {
@ -1051,8 +1046,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
if (token == XContentParser.Token.VALUE_STRING) {
docValueFields.add(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
"] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
}
}
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
@ -1068,8 +1063,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
if (token == XContentParser.Token.VALUE_STRING) {
stats.add(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
"] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
}
}
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
@ -1221,12 +1216,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
builder.field(STATS_FIELD.getPreferredName(), stats);
}
if (ext != null) {
builder.field(EXT_FIELD.getPreferredName());
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(ext)) {
parser.nextToken();
builder.copyCurrentStructure(parser);
if (extBuilders != null) {
builder.startObject(EXT_FIELD.getPreferredName());
for (SearchExtBuilder extBuilder : extBuilders) {
extBuilder.toXContent(builder, params);
}
builder.endObject();
}
}
@ -1344,9 +1339,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
@Override
public int hashCode() {
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from,
highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile);
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder,
sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile, extBuilders);
}
@Override
@ -1381,7 +1376,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
&& Objects.equals(timeout, other.timeout)
&& Objects.equals(trackScores, other.trackScores)
&& Objects.equals(version, other.version)
&& Objects.equals(profile, other.profile);
&& Objects.equals(profile, other.profile)
&& Objects.equals(extBuilders, other.extBuilders);
}
}
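Taken together, the ext element of a search request is now parsed eagerly into SearchExtBuilder objects instead of being carried around as raw bytes. A hedged sketch of building such a request through the Java API, using the new ext(List<SearchExtBuilder>) setter and the hypothetical MyExtBuilder; it corresponds to a body like {"query": {...}, "ext": {"my_section": {"field": "title"}}}.

// Fragment: requires the usual imports (SearchSourceBuilder, QueryBuilders, Collections) and a
// MyExtBuilder registered through SearchPlugin#getSearchExts so that the section name resolves.
SearchSourceBuilder source = new SearchSourceBuilder()
        .query(QueryBuilders.matchAllQuery())
        .ext(Collections.singletonList(new MyExtBuilder("title")));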

View File

@ -37,7 +37,8 @@ import java.util.Collection;
import java.util.Iterator;
/**
*
* Dfs phase of a search request, used to make scoring 100% accurate by collecting additional info from each shard before the query phase.
* The additional information is used to better compare the scores coming from all the shards, which depend on local factors (e.g. idf)
*/
public class DfsPhase implements SearchPhase {

View File

@ -43,7 +43,6 @@ import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase;
@ -62,11 +61,11 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder;
/**
*
* Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified
* after reducing all of the matches returned by the query phase
*/
public class FetchPhase implements SearchPhase {
@ -77,15 +76,6 @@ public class FetchPhase implements SearchPhase {
this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsFetchSubPhase(this);
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
Map<String, SearchParseElement> parseElements = new HashMap<>();
for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
parseElements.putAll(fetchSubPhase.parseElements());
}
return unmodifiableMap(parseElements);
}
@Override
public void preProcess(SearchContext context) {
}

View File

@ -22,11 +22,9 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -69,10 +67,6 @@ public interface FetchSubPhase {
return searcher.getIndexReader();
}
public IndexSearcher topLevelSearcher() {
return searcher;
}
public Map<String, Object> cache() {
if (cache == null) {
cache = new HashMap<>();
@ -82,10 +76,6 @@ public interface FetchSubPhase {
}
default Map<String, ? extends SearchParseElement> parseElements() {
return Collections.emptyMap();
}
/**
* Executes the hit level phase, with a reader and doc id (note, its a low level reader, and the matching doc).
*/
@ -93,23 +83,4 @@ public interface FetchSubPhase {
default void hitsExecute(SearchContext context, InternalSearchHit[] hits) {}
/**
* This interface is in the fetch phase plugin mechanism.
* Whenever a new search is executed we create a new {@link SearchContext} that holds individual contexts for each {@link org.elasticsearch.search.fetch.FetchSubPhase}.
* Fetch phases that use the plugin mechanism must provide a ContextFactory to the SearchContext that creates the fetch phase context and also associates them with a name.
* See {@link SearchContext#getFetchSubPhaseContext(FetchSubPhase.ContextFactory)}
*/
interface ContextFactory<SubPhaseContext extends FetchSubPhaseContext> {
/**
* The name of the context.
*/
String getName();
/**
* Creates a new instance of a FetchSubPhaseContext that holds all information a FetchSubPhase needs to execute on hits.
*/
SubPhaseContext newContextInstance();
}
}

View File

@ -1,49 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
/**
* All configuration and context needed by the FetchSubPhase to execute on hits.
* The only required information in this base class is whether or not the sub phase needs to be run at all.
* It can be extended by FetchSubPhases to hold information the phase needs to execute on hits.
* See {@link org.elasticsearch.search.fetch.FetchSubPhase.ContextFactory} and also {@link DocValueFieldsContext} for an example.
*/
public class FetchSubPhaseContext {
// This is to store if the FetchSubPhase should be executed at all.
private boolean hitExecutionNeeded = false;
/**
* Set if this phase should be executed at all.
*/
public void setHitExecutionNeeded(boolean hitExecutionNeeded) {
this.hitExecutionNeeded = hitExecutionNeeded;
}
/**
* Returns whether this phase should be executed at all.
*/
public boolean hitExecutionNeeded() {
return hitExecutionNeeded;
}
}

View File

@ -18,38 +18,23 @@
*/
package org.elasticsearch.search.fetch.subphase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import java.util.ArrayList;
import java.util.List;
/**
* All the required context to pull a field from the doc values.
*/
public class DocValueFieldsContext extends FetchSubPhaseContext {
public class DocValueFieldsContext {
public static class DocValueField {
private final String name;
private final List<String> fields;
public DocValueField(String name) {
this.name = name;
}
public String name() {
return name;
}
public DocValueFieldsContext(List<String> fields) {
this.fields = fields;
}
private List<DocValueField> fields = new ArrayList<>();
public DocValueFieldsContext() {
}
public void add(DocValueField field) {
this.fields.add(field);
}
public List<DocValueField> fields() {
/**
* Returns the required docvalue fields
*/
public List<String> fields() {
return this.fields;
}
}
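A minimal sketch of how the simplified context is attached to a search context; the field names are placeholders, while the constructor, fields() and the docValueFieldsContext setter come from this change.

import java.util.Arrays;

import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.internal.SearchContext;

// Sketch only: store the requested docvalue field names on the context so that
// DocValueFieldsFetchSubPhase can read them back via context.docValueFieldsContext().
class DocValueFieldsContextExample {
    static void addDocValueFields(SearchContext context) {
        context.docValueFieldsContext(new DocValueFieldsContext(Arrays.asList("field1", "field2")));
    }
}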

View File

@ -36,35 +36,21 @@ import java.util.HashMap;
*/
public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {
public static final String NAME = "docvalue_fields";
public static final ContextFactory<DocValueFieldsContext> CONTEXT_FACTORY = new ContextFactory<DocValueFieldsContext>() {
@Override
public String getName() {
return NAME;
}
@Override
public DocValueFieldsContext newContextInstance() {
return new DocValueFieldsContext();
}
};
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
if (context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded() == false) {
if (context.docValueFieldsContext() == null) {
return;
}
for (DocValueFieldsContext.DocValueField field : context.getFetchSubPhaseContext(CONTEXT_FACTORY).fields()) {
for (String field : context.docValueFieldsContext().fields()) {
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<>(2));
}
SearchHitField hitField = hitContext.hit().fields().get(field.name());
SearchHitField hitField = hitContext.hit().fields().get(field);
if (hitField == null) {
hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2));
hitContext.hit().fields().put(field.name(), hitField);
hitField = new InternalSearchHitField(field, new ArrayList<>(2));
hitContext.hit().fields().put(field, hitField);
}
MappedFieldType fieldType = context.mapperService().fullName(field.name());
MappedFieldType fieldType = context.mapperService().fullName(field);
if (fieldType != null) {
AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
ScriptDocValues values = data.getScriptValues();

View File

@ -50,17 +50,17 @@ import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
@ -80,9 +80,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
*/
public class DefaultSearchContext extends SearchContext {
private final long id;
@ -110,6 +107,7 @@ public class DefaultSearchContext extends SearchContext {
private StoredFieldsContext storedFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private DocValueFieldsContext docValueFieldsContext;
private int from = -1;
private int size = -1;
private SortAndFormats sort;
@ -148,7 +146,7 @@ public class DefaultSearchContext extends SearchContext {
private volatile long lastAccessTime = -1;
private Profilers profilers;
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
private final QueryShardContext queryShardContext;
private FetchPhase fetchPhase;
@ -388,14 +386,16 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
String subPhaseName = contextFactory.getName();
if (subPhaseContexts.get(subPhaseName) == null) {
subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
}
return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
public void addSearchExt(SearchExtBuilder searchExtBuilder) {
// it's ok to use the writeable name here given that we enforce it to be the same as the name of the element that gets
// parsed by the corresponding parser. There is one single name and one single way to retrieve the parsed object from the context.
searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder);
}
@Override
public SearchExtBuilder getSearchExt(String name) {
return searchExtBuilders.get(name);
}
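A small sketch of the round trip the comment above relies on, assuming nothing beyond the two methods added here; the helper class name is hypothetical.

import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.internal.SearchContext;

// Sketch: a parsed ext builder is stored under its writeable name and retrieved
// under that same name by whichever fetch sub phase consumes it.
class SearchExtRoundTripExample {
    static void store(SearchContext context, SearchExtBuilder builder) {
        context.addSearchExt(builder);
        assert context.getSearchExt(builder.getWriteableName()) == builder;
    }
}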
@Override
public SearchContextHighlight highlight() {
@ -470,6 +470,17 @@ public class DefaultSearchContext extends SearchContext {
return this;
}
@Override
public DocValueFieldsContext docValueFieldsContext() {
return docValueFieldsContext;
}
@Override
public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
this.docValueFieldsContext = docValueFieldsContext;
return this;
}
@Override
public ContextIndexSearcher searcher() {
return this.searcher;

View File

@ -35,17 +35,16 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
@ -512,8 +511,13 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
return in.getFetchSubPhaseContext(contextFactory);
public void addSearchExt(SearchExtBuilder searchExtBuilder) {
in.addSearchExt(searchExtBuilder);
}
@Override
public SearchExtBuilder getSearchExt(String name) {
return in.getSearchExt(name);
}
@Override

View File

@ -22,9 +22,7 @@ package org.elasticsearch.search.internal;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.RefCount;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
@ -43,17 +41,17 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
@ -187,7 +185,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract SearchContext aggregations(SearchContextAggregations aggregations);
public abstract <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory);
public abstract void addSearchExt(SearchExtBuilder searchExtBuilder);
public abstract SearchExtBuilder getSearchExt(String name);
public abstract SearchContextHighlight highlight();
@ -226,6 +226,10 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);
public abstract DocValueFieldsContext docValueFieldsContext();
public abstract SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext);
public abstract ContextIndexSearcher searcher();
public abstract IndexShard indexShard();

View File

@ -25,6 +25,7 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
@ -36,8 +37,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.List;
/**
*/
public class SubSearchContext extends FilteredSearchContext {
// By default return 3 hits per bucket. A higher default would make the response really large by default, since
@ -60,6 +59,7 @@ public class SubSearchContext extends FilteredSearchContext {
private StoredFieldsContext storedFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private DocValueFieldsContext docValueFieldsContext;
private SearchContextHighlight highlight;
private boolean explain;
@ -154,6 +154,17 @@ public class SubSearchContext extends FilteredSearchContext {
return this;
}
@Override
public DocValueFieldsContext docValueFieldsContext() {
return docValueFieldsContext;
}
@Override
public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
this.docValueFieldsContext = docValueFieldsContext;
return this;
}
@Override
public void timeout(TimeValue timeout) {
throw new UnsupportedOperationException("Not supported");

View File

@ -68,7 +68,8 @@ import java.util.List;
import java.util.concurrent.Callable;
/**
*
* Query phase of a search request, used to run the query and get back from each shard information about the matching documents
* (document ids and score or sort criteria) so that matches can be reduced on the coordinating node
*/
public class QueryPhase implements SearchPhase {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* Rescore phase of a search request, used to run potentially expensive scoring models against the top matching documents.
*/
public class RescorePhase extends AbstractComponent implements SearchPhase {

View File

@ -35,6 +35,7 @@ import java.util.List;
import java.util.Map;
/**
* Suggest phase of a search request, used to collect suggestions
*/
public class SuggestPhase extends AbstractComponent implements SearchPhase {

View File

@ -175,6 +175,6 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry();
QueryParser<MatchAllQueryBuilder> parser = MatchAllQueryBuilder::fromXContent;
registry.register(parser, MatchAllQueryBuilder.NAME);
return new SearchRequestParsers(registry, null, null);
return new SearchRequestParsers(registry, null, null, null);
}
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.search.fetch.FetchSubPhasePluginIT;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -36,6 +37,7 @@ import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
@ -53,7 +55,8 @@ public class SearchRequestTests extends ESTestCase {
bindMapperExtension();
}
};
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()) {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false,
Collections.singletonList(new FetchSubPhasePluginIT.FetchTermVectorsPlugin())) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -59,6 +59,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.fetch.FetchSubPhasePluginIT;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests;
import org.elasticsearch.search.rescore.QueryRescoreBuilderTests;
@ -85,7 +86,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.CoreMatchers.containsString;
@ -132,7 +132,8 @@ public class SearchSourceBuilderTests extends ESTestCase {
bindMapperExtension();
}
};
SearchModule searchModule = new SearchModule(settings, false, emptyList()) {
SearchModule searchModule = new SearchModule(settings, false,
Collections.singletonList(new FetchSubPhasePluginIT.FetchTermVectorsPlugin())) {
@Override
protected void configureSearch() {
// Skip me
@ -389,11 +390,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
builder.aggregation(AggregationBuilders.avg(randomAsciiOfLengthBetween(5, 20)));
}
if (randomBoolean()) {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder();
xContentBuilder.startObject();
xContentBuilder.field("term_vectors_fetch", randomAsciiOfLengthBetween(5, 20));
xContentBuilder.endObject();
builder.ext(xContentBuilder);
builder.ext(Collections.singletonList(new FetchSubPhasePluginIT.TermVectorsFetchBuilder("test")));
}
if (randomBoolean()) {
String field = randomBoolean() ? null : randomAsciiOfLengthBetween(5, 20);
@ -431,15 +428,14 @@ public class SearchSourceBuilderTests extends ESTestCase {
// test the embedded case
}
SearchSourceBuilder newBuilder = SearchSourceBuilder.fromXContent(parseContext, searchRequestParsers.aggParsers,
searchRequestParsers.suggesters);
searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertNull(parser.nextToken());
assertEquals(testBuilder, newBuilder);
assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
}
private static QueryParseContext createParseContext(XContentParser parser) {
QueryParseContext context = new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher);
return context;
return new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher);
}
public void testSerialization() throws IOException {
@ -497,7 +493,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertArrayEquals(new String[]{"*.field2"}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{"include"}, searchSourceBuilder.fetchSource().includes());
}
@ -506,7 +502,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"_source\": false}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes());
assertFalse(searchSourceBuilder.fetchSource().fetchSource());
@ -519,7 +515,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"sort\": \"foo\"}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(1, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("foo"), searchSourceBuilder.sorts().get(0));
}
@ -535,7 +531,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
" ]}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(5, searchSourceBuilder.sorts().size());
assertEquals(new FieldSortBuilder("post_date"), searchSourceBuilder.sorts().get(0));
assertEquals(new FieldSortBuilder("user"), searchSourceBuilder.sorts().get(1));
@ -559,7 +555,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
"}\n";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(1, searchSourceBuilder.aggregations().count());
}
}
@ -575,7 +571,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
"}\n";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(1, searchSourceBuilder.aggregations().count());
}
}
@ -601,7 +597,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
"}\n";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(1, searchSourceBuilder.rescores().size());
assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
searchSourceBuilder.rescores().get(0));
@ -624,7 +620,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
"}\n";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertEquals(1, searchSourceBuilder.rescores().size());
assertEquals(new QueryRescorerBuilder(QueryBuilders.matchQuery("content", "baz")).windowSize(50),
searchSourceBuilder.rescores().get(0));
@ -637,7 +633,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
final String query = "{ \"query\": { \"match_all\": {}}, \"timeout\": \"" + timeout + "\"}";
try (XContentParser parser = XContentFactory.xContent(query).createParser(query)) {
final SearchSourceBuilder builder = SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
assertThat(builder.timeout(), equalTo(TimeValue.parseTimeValue(timeout, null, "timeout")));
}
}
@ -650,7 +646,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
expectThrows(
ElasticsearchParseException.class,
() -> SearchSourceBuilder.fromXContent(createParseContext(parser),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers));
assertThat(e, hasToString(containsString("unit is missing or unrecognized")));
}
}

View File

@ -26,14 +26,18 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchExtParser;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
@ -48,21 +52,27 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.CoreMatchers.equalTo;
@ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
@ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 2)
public class FetchSubPhasePluginIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singletonList(FetchTermVectorsPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
@SuppressWarnings("unchecked")
public void testPlugin() throws Exception {
client().admin()
.indices()
@ -84,12 +94,11 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().execute().actionGet();
XContentBuilder extSource = jsonBuilder().startObject()
.field("term_vectors_fetch", "test")
.endObject();
SearchResponse response = client().prepareSearch().setSource(new SearchSourceBuilder().ext(extSource)).get();
SearchResponse response = client().prepareSearch().setSource(new SearchSourceBuilder()
.ext(Collections.singletonList(new TermVectorsFetchBuilder("test")))).get();
assertSearchResponse(response);
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"), equalTo(2));
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"),
equalTo(2));
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("am"),
equalTo(2));
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("sam"),
@ -101,46 +110,35 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return singletonList(new TermVectorsFetchSubPhase());
}
@Override
public List<SearchExtSpec<?>> getSearchExts() {
return Collections.singletonList(new SearchExtSpec<>(TermVectorsFetchSubPhase.NAME,
TermVectorsFetchBuilder::new, TermVectorsFetchParser.INSTANCE));
}
}
public static final class TermVectorsFetchSubPhase implements FetchSubPhase {
public static final ContextFactory<TermVectorsFetchContext> CONTEXT_FACTORY = new ContextFactory<TermVectorsFetchContext>() {
@Override
public String getName() {
return NAMES[0];
}
@Override
public TermVectorsFetchContext newContextInstance() {
return new TermVectorsFetchContext();
}
};
public static final String[] NAMES = {"term_vectors_fetch"};
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return singletonMap("term_vectors_fetch", new TermVectorsFetchParseElement());
}
private static final String NAME = "term_vectors_fetch";
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
if (context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded() == false) {
TermVectorsFetchBuilder fetchSubPhaseBuilder = (TermVectorsFetchBuilder)context.getSearchExt(NAME);
if (fetchSubPhaseBuilder == null) {
return;
}
String field = context.getFetchSubPhaseContext(CONTEXT_FACTORY).getField();
String field = fetchSubPhaseBuilder.getField();
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<>());
}
SearchHitField hitField = hitContext.hit().fields().get(NAMES[0]);
SearchHitField hitField = hitContext.hit().fields().get(NAME);
if (hitField == null) {
hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1));
hitContext.hit().fields().put(NAMES[0], hitField);
hitField = new InternalSearchHitField(NAME, new ArrayList<>(1));
hitContext.hit().fields().put(NAME, hitField);
}
TermVectorsResponse termVector = TermVectorsService.getTermVectors(context.indexShard(), new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(), hitContext.hit().type(), hitContext.hit().id()));
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
hitContext.hit().type(), hitContext.hit().id());
TermVectorsResponse termVector = TermVectorsService.getTermVectors(context.indexShard(), termVectorsRequest);
try {
Map<String, Integer> tv = new HashMap<>();
TermsEnum terms = termVector.getFields().terms(field).iterator();
@ -155,36 +153,74 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
}
}
public static class TermVectorsFetchParseElement implements SearchParseElement {
public static final class TermVectorsFetchParser implements SearchExtParser<TermVectorsFetchBuilder> {
private static final TermVectorsFetchParser INSTANCE = new TermVectorsFetchParser();
private TermVectorsFetchParser() {
}
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
TermVectorsFetchContext fetchSubPhaseContext = context.getFetchSubPhaseContext(TermVectorsFetchSubPhase.CONTEXT_FACTORY);
// this is to make sure that the SubFetchPhase knows it should execute
fetchSubPhaseContext.setHitExecutionNeeded(true);
public TermVectorsFetchBuilder fromXContent(XContentParser parser) throws IOException {
String field;
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) {
String fieldName = parser.text();
fetchSubPhaseContext.setField(fieldName);
field = parser.text();
} else {
throw new IllegalStateException("Expected a VALUE_STRING but got " + token);
throw new ParsingException(parser.getTokenLocation(), "Expected a VALUE_STRING but got " + token);
}
if (field == null) {
throw new ParsingException(parser.getTokenLocation(), "no fields specified for " + TermVectorsFetchSubPhase.NAME);
}
return new TermVectorsFetchBuilder(field);
}
}
public static class TermVectorsFetchContext extends FetchSubPhaseContext {
public static final class TermVectorsFetchBuilder extends SearchExtBuilder {
private final String field;
private String field = null;
public TermVectorsFetchContext() {
public TermVectorsFetchBuilder(String field) {
this.field = field;
}
public void setField(String field) {
this.field = field;
public TermVectorsFetchBuilder(StreamInput in) throws IOException {
this.field = in.readString();
}
public String getField() {
return field;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TermVectorsFetchBuilder that = (TermVectorsFetchBuilder) o;
return Objects.equals(field, that.field);
}
@Override
public int hashCode() {
return Objects.hash(field);
}
@Override
public String getWriteableName() {
return TermVectorsFetchSubPhase.NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.field(TermVectorsFetchSubPhase.NAME, field);
}
}
}

View File

@ -34,12 +34,14 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchRequestTests;
import org.elasticsearch.search.fetch.FetchSubPhasePluginIT;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
@ -56,7 +58,8 @@ public class ShardSearchTransportRequestTests extends ESTestCase {
bindMapperExtension();
}
};
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()) {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false,
Collections.singletonList(new FetchSubPhasePluginIT.FetchTermVectorsPlugin())) {
@Override
protected void configureSearch() {
// Skip me

View File

@ -32,14 +32,11 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -86,7 +83,7 @@ public class TransportSearchTemplateAction extends HandledTransportAction<Search
try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
SearchSourceBuilder builder = SearchSourceBuilder.searchSource();
builder.parseXContent(new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher),
searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
searchRequest.source(builder);
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {

View File

@ -37,9 +37,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -160,8 +158,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
try {
SearchRequest searchRequest = TransportPercolateAction.createSearchRequest(
percolateRequest, docSource, searchRequestParsers.queryParsers,
searchRequestParsers.aggParsers, parseFieldMatcher
);
searchRequestParsers.aggParsers, searchRequestParsers.searchExtParsers, parseFieldMatcher);
multiSearchRequest.add(searchRequest);
} catch (Exception e) {
preFailures.put(i, new MultiPercolateResponse.Item(e));

View File

@ -45,6 +45,7 @@ import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchExtRegistry;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchRequestParsers;
@ -69,7 +70,8 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
public TransportPercolateAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
Client client, SearchRequestParsers searchRequestParsers) {
super(settings, PercolateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, PercolateRequest::new);
super(settings, PercolateAction.NAME, threadPool, transportService, actionFilters,
indexNameExpressionResolver, PercolateRequest::new);
this.client = client;
this.searchRequestParsers = searchRequestParsers;
this.parseFieldMatcher = new ParseFieldMatcher(settings);
@ -84,7 +86,8 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
if (getResponse.isExists()) {
innerDoExecute(request, getResponse.getSourceAsBytesRef(), listener);
} else {
onFailure(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist", request.getRequest().index(), request.getRequest().type(), request.getRequest().id()));
onFailure(new ResourceNotFoundException("percolate document [{}/{}/{}] doesn't exist",
request.getRequest().index(), request.getRequest().type(), request.getRequest().id()));
}
}
@ -102,7 +105,7 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
SearchRequest searchRequest;
try {
searchRequest = createSearchRequest(request, docSource, searchRequestParsers.queryParsers,
searchRequestParsers.aggParsers, parseFieldMatcher);
searchRequestParsers.aggParsers, searchRequestParsers.searchExtParsers, parseFieldMatcher);
} catch (IOException e) {
listener.onFailure(e);
return;
@ -124,7 +127,10 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
});
}
public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource, IndicesQueriesRegistry queryRegistry, AggregatorParsers aggParsers, ParseFieldMatcher parseFieldMatcher) throws IOException {
public static SearchRequest createSearchRequest(PercolateRequest percolateRequest, BytesReference documentSource,
IndicesQueriesRegistry queryRegistry, AggregatorParsers aggParsers,
SearchExtRegistry searchExtRegistry, ParseFieldMatcher parseFieldMatcher)
throws IOException {
SearchRequest searchRequest = new SearchRequest();
if (percolateRequest.indices() != null) {
searchRequest.indices(percolateRequest.indices());
@ -220,7 +226,7 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) {
QueryParseContext context = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
searchSourceBuilder.parseXContent(context, aggParsers, null);
searchSourceBuilder.parseXContent(context, aggParsers, null, searchExtRegistry);
searchRequest.source(searchSourceBuilder);
return searchRequest;
}
@ -235,7 +241,8 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
matches = new PercolateResponse.Match[hits.getHits().length];
for (int i = 0; i < hits.getHits().length; i++) {
SearchHit hit = hits.getHits()[i];
matches[i] = new PercolateResponse.Match(new Text(hit.getIndex()), new Text(hit.getId()), hit.getScore(), hit.getHighlightFields());
matches[i] = new PercolateResponse.Match(new Text(hit.getIndex()),
new Text(hit.getId()), hit.getScore(), hit.getHighlightFields());
}
}
@ -246,8 +253,8 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
}
return new PercolateResponse(
searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(),
shardFailures, matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations()
searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getFailedShards(), shardFailures,
matches, hits.getTotalHits(), searchResponse.getTookInMillis(), (InternalAggregations) searchResponse.getAggregations()
);
}

View File

@ -42,14 +42,11 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.reindex.remote.RemoteInfo;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
import java.util.List;
@ -87,7 +84,8 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
builder.map(source);
try (XContentParser innerParser = parser.contentType().xContent().createParser(builder.bytes())) {
request.getSearchRequest().source().parseXContent(context.queryParseContext(innerParser),
context.searchRequestParsers.aggParsers, context.searchRequestParsers.suggesters);
context.searchRequestParsers.aggParsers, context.searchRequestParsers.suggesters,
context.searchRequestParsers.searchExtParsers);
}
};

View File

@ -110,7 +110,7 @@ public class RestReindexActionTests extends ESTestCase {
}
try (XContentParser p = JsonXContent.jsonXContent.createParser(request)) {
ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
SearchRequestParsers searchParsers = new SearchRequestParsers(new IndicesQueriesRegistry(), null, null);
SearchRequestParsers searchParsers = new SearchRequestParsers(new IndicesQueriesRegistry(), null, null, null);
RestReindexAction.PARSER.parse(p, r, new ReindexParseContext(searchParsers, ParseFieldMatcher.STRICT));
assertEquals("localhost", r.getRemoteInfo().getHost());
assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices());

View File

@ -36,17 +36,17 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
@ -90,7 +90,7 @@ public class TestSearchContext extends SearchContext {
private SearchContextAggregations aggregations;
private final long originNanoTime = System.nanoTime();
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
public TestSearchContext(ThreadPool threadPool, BigArrays bigArrays, ScriptService scriptService, IndexService indexService) {
super(ParseFieldMatcher.STRICT);
@ -197,12 +197,13 @@ public class TestSearchContext extends SearchContext {
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
String subPhaseName = contextFactory.getName();
if (subPhaseContexts.get(subPhaseName) == null) {
subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
}
return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
public void addSearchExt(SearchExtBuilder searchExtBuilder) {
searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder);
}
@Override
public SearchExtBuilder getSearchExt(String name) {
return searchExtBuilders.get(name);
}
@Override
@ -262,6 +263,16 @@ public class TestSearchContext extends SearchContext {
return null;
}
@Override
public DocValueFieldsContext docValueFieldsContext() {
return null;
}
@Override
public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
return null;
}
@Override
public ContextIndexSearcher searcher() {
return searcher;