Merge pull request #12400 from brwe/plug-fetch-sub-phases

Make fetch sub phases pluggable
Britta Weber 2015-08-03 15:19:28 +02:00
commit 26d51c299e
16 changed files with 501 additions and 99 deletions

View File

@ -32,6 +32,7 @@ import org.apache.lucene.util.Counter;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lease.Releasables;
@ -58,7 +59,7 @@ import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -116,6 +117,7 @@ public class PercolateContext extends SearchContext {
private SearchContextAggregations aggregations;
private QuerySearchResult querySearchResult;
private Sort sort;
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
IndexService indexService, PageCacheRecycler pageCacheRecycler,
@ -282,6 +284,15 @@ public class PercolateContext extends SearchContext {
return this;
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
String subPhaseName = contextFactory.getName();
if (subPhaseContexts.get(subPhaseName) == null) {
subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
}
return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
}
// Unused:
@Override
public void preProcess() {
@ -378,16 +389,6 @@ public class PercolateContext extends SearchContext {
throw new UnsupportedOperationException();
}
@Override
public boolean hasFieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public FieldDataFieldsContext fieldDataFields() {
throw new UnsupportedOperationException();
}
@Override
public boolean hasScriptFields() {
throw new UnsupportedOperationException();

View File

@ -32,6 +32,7 @@ import org.elasticsearch.search.aggregations.AggregationModule;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseModule;
import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase;
@ -63,7 +64,8 @@ public class SearchModule extends AbstractModule implements SpawnModules {
new HighlightModule(),
new SuggestModule(),
new FunctionScoreModule(),
new AggregationModule());
new AggregationModule(),
new FetchSubPhaseModule());
}
@Override
@ -73,14 +75,6 @@ public class SearchModule extends AbstractModule implements SpawnModules {
bind(SearchPhaseController.class).asEagerSingleton();
bind(FetchPhase.class).asEagerSingleton();
bind(ExplainFetchSubPhase.class).asEagerSingleton();
bind(FieldDataFieldsFetchSubPhase.class).asEagerSingleton();
bind(ScriptFieldsFetchSubPhase.class).asEagerSingleton();
bind(FetchSourceSubPhase.class).asEagerSingleton();
bind(VersionFetchSubPhase.class).asEagerSingleton();
bind(MatchedQueriesFetchSubPhase.class).asEagerSingleton();
bind(HighlightPhase.class).asEagerSingleton();
bind(InnerHitsFetchSubPhase.class).asEagerSingleton();
bind(SearchServiceTransportAction.class).asEagerSingleton();
bind(MoreLikeThisFetchService.class).asEagerSingleton();

View File

@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
@ -83,13 +84,10 @@ public class FetchPhase implements SearchPhase {
private final FetchSubPhase[] fetchSubPhases;
@Inject
public FetchPhase(HighlightPhase highlightPhase, ScriptFieldsFetchSubPhase scriptFieldsPhase,
MatchedQueriesFetchSubPhase matchedQueriesPhase, ExplainFetchSubPhase explainPhase, VersionFetchSubPhase versionPhase,
FetchSourceSubPhase fetchSourceSubPhase, FieldDataFieldsFetchSubPhase fieldDataFieldsFetchSubPhase,
InnerHitsFetchSubPhase innerHitsFetchSubPhase) {
public FetchPhase(Set<FetchSubPhase> fetchSubPhases, InnerHitsFetchSubPhase innerHitsFetchSubPhase) {
innerHitsFetchSubPhase.setFetchPhase(this);
this.fetchSubPhases = new FetchSubPhase[]{scriptFieldsPhase, matchedQueriesPhase, explainPhase, highlightPhase,
fetchSourceSubPhase, versionPhase, fieldDataFieldsFetchSubPhase, innerHitsFetchSubPhase};
this.fetchSubPhases = fetchSubPhases.toArray(new FetchSubPhase[fetchSubPhases.size() + 1]);
this.fetchSubPhases[fetchSubPhases.size()] = innerHitsFetchSubPhase;
}
@Override

View File

@ -114,4 +114,23 @@ public interface FetchSubPhase {
boolean hitsExecutionNeeded(SearchContext context);
void hitsExecute(SearchContext context, InternalSearchHit[] hits);
/**
* This interface is part of the fetch sub phase plugin mechanism.
* Whenever a new search is executed we create a new {@link SearchContext} that holds individual contexts for each {@link org.elasticsearch.search.fetch.FetchSubPhase}.
* Fetch sub phases that use the plugin mechanism must provide a ContextFactory to the SearchContext that creates the sub phase context and associates it with a name.
* See {@link SearchContext#getFetchSubPhaseContext(FetchSubPhase.ContextFactory)}
*/
public interface ContextFactory<SubPhaseContext extends FetchSubPhaseContext> {
/**
* The name of the context.
*/
public String getName();
/**
* Creates a new instance of a FetchSubPhaseContext that holds all information a FetchSubPhase needs to execute on hits.
*/
public SubPhaseContext newContextInstance();
}
}
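
For illustration, a minimal sketch of how a sub phase might expose such a factory. The class name and the "my_fetch_sub_phase" key are hypothetical; FieldDataFieldsFetchSubPhase further down in this diff exposes its CONTEXT_FACTORY in exactly this shape.

import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;

public class MyFetchSubPhaseFactoryExample {

    // Hypothetical factory: the SearchContext lazily creates and caches one context per factory name.
    public static final FetchSubPhase.ContextFactory<FetchSubPhaseContext> CONTEXT_FACTORY =
            new FetchSubPhase.ContextFactory<FetchSubPhaseContext>() {
                @Override
                public String getName() {
                    return "my_fetch_sub_phase";
                }

                @Override
                public FetchSubPhaseContext newContextInstance() {
                    // one fresh context per search request; real sub phases usually return their own subclass here
                    return new FetchSubPhaseContext();
                }
            };
}

A sub phase built on this mechanism then typically implements hitExecutionNeeded(SearchContext) as context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded(), which is exactly what FieldDataFieldsFetchSubPhase does below.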

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
/**
* All configuration and context needed by a FetchSubPhase to execute on hits.
* The only information required by this base class is whether or not the sub phase needs to run at all.
* FetchSubPhases can extend it to hold any additional state they need to execute on hits.
* See {@link org.elasticsearch.search.fetch.FetchSubPhase.ContextFactory} and {@link org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext} for an example.
*/
public class FetchSubPhaseContext {
// Stores whether the FetchSubPhase should be executed at all.
private boolean hitExecutionNeeded = false;
/**
* Sets whether this phase should be executed at all.
*/
void setHitExecutionNeeded(boolean hitExecutionNeeded) {
this.hitExecutionNeeded = hitExecutionNeeded;
}
/**
* Returns whether this phase should be executed at all.
*/
public boolean hitExecutionNeeded() {
return hitExecutionNeeded;
}
}
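
For example, a sub phase that needs to know which field to load could extend this class roughly as follows. This is a hypothetical sketch; FieldDataFieldsContext and the TermVectorsFetchContext in the new integration test are the real equivalents.

import org.elasticsearch.search.fetch.FetchSubPhaseContext;

// Hypothetical context: carries the field name the sub phase should process for each hit.
// FetchSubPhaseParseElement flips the inherited hitExecutionNeeded flag when the
// corresponding element shows up in the search source.
public class MyFieldFetchContext extends FetchSubPhaseContext {

    private String field;

    public void field(String field) {
        this.field = field;
    }

    public String field() {
        return field;
    }
}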

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase;
import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase;
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.source.FetchSourceSubPhase;
import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import java.util.List;
/**
* Module for registering fetch sub phases. Fetch sub phases are executed when the document is finally
* retrieved from the shard. To implement a new fetch sub phase one needs to implement the following classes and interfaces
* <p/>
* <ul>
* <li> {@link FetchSubPhaseParseElement} </li>
* <li> {@link FetchSubPhase} </li>
* <li> {@link FetchSubPhaseContext} </li>
* </ul>
* <p/>
* The FetchSubPhase must then be registered with this module via {@link FetchSubPhaseModule#registerFetchSubPhase(Class<? extends FetchSubPhase>)}.
* See {@link FieldDataFieldsFetchSubPhase} for an example.
*/
public class FetchSubPhaseModule extends AbstractModule {
private List<Class<? extends FetchSubPhase>> fetchSubPhases = Lists.newArrayList();
public FetchSubPhaseModule() {
registerFetchSubPhase(ExplainFetchSubPhase.class);
registerFetchSubPhase(FieldDataFieldsFetchSubPhase.class);
registerFetchSubPhase(ScriptFieldsFetchSubPhase.class);
registerFetchSubPhase(FetchSourceSubPhase.class);
registerFetchSubPhase(VersionFetchSubPhase.class);
registerFetchSubPhase(MatchedQueriesFetchSubPhase.class);
registerFetchSubPhase(HighlightPhase.class);
}
public void registerFetchSubPhase(Class<? extends FetchSubPhase> subPhase) {
fetchSubPhases.add(subPhase);
}
@Override
protected void configure() {
Multibinder<FetchSubPhase> parserMapBinder = Multibinder.newSetBinder(binder(), FetchSubPhase.class);
for (Class<? extends FetchSubPhase> clazz : fetchSubPhases) {
parserMapBinder.addBinding().to(clazz);
}
bind(InnerHitsFetchSubPhase.class).asEagerSingleton();
}
}
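
With this module in place, a plugin can hook in its own sub phase through the standard onModule mechanism. A sketch under the assumption that MyFetchSubPhase is a hypothetical FetchSubPhase implementation; the FetchTermVectorsPlugin in the new integration test below does exactly this.

import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.fetch.FetchSubPhaseModule;

public class MyFetchPlugin extends AbstractPlugin {

    @Override
    public String name() {
        return "my-fetch-plugin";
    }

    @Override
    public String description() {
        return "registers a custom fetch sub phase";
    }

    // Invoked reflectively for every FetchSubPhaseModule that is created.
    public void onModule(FetchSubPhaseModule fetchSubPhaseModule) {
        // MyFetchSubPhase is a hypothetical FetchSubPhase implementation.
        fetchSubPhaseModule.registerFetchSubPhase(MyFetchSubPhase.class);
    }
}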

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
/**
* A parse element for a {@link org.elasticsearch.search.fetch.FetchSubPhase} that is used when parsing a search request.
*/
public abstract class FetchSubPhaseParseElement<SubPhaseContext extends FetchSubPhaseContext> implements SearchParseElement {
@Override
final public void parse(XContentParser parser, SearchContext context) throws Exception {
SubPhaseContext fetchSubPhaseContext = context.getFetchSubPhaseContext(getContextFactory());
// this is to make sure that the FetchSubPhase knows it should execute
fetchSubPhaseContext.setHitExecutionNeeded(true);
innerParse(parser, fetchSubPhaseContext, context);
}
/**
* Implement the actual parsing here.
*/
protected abstract void innerParse(XContentParser parser, SubPhaseContext fetchSubPhaseContext, SearchContext searchContext) throws Exception;
/**
* Return the ContextFactory for this FetchSubPhase.
*/
protected abstract FetchSubPhase.ContextFactory<SubPhaseContext> getContextFactory();
}
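
The simplest subclass is a pure on/off switch: by the time innerParse runs, parse(...) has already marked the context as needed, so a flag-style element has nothing left to record. A hypothetical sketch, reusing the MyFetchSubPhaseFactoryExample.CONTEXT_FACTORY sketched earlier; FieldDataFieldsParseElement in the next file is the real, richer example.

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.FetchSubPhaseParseElement;
import org.elasticsearch.search.internal.SearchContext;

public class MyFetchParseElement extends FetchSubPhaseParseElement<FetchSubPhaseContext> {

    @Override
    protected void innerParse(XContentParser parser, FetchSubPhaseContext subPhaseContext,
                              SearchContext searchContext) throws Exception {
        // Nothing to record: parse(...) has already called setHitExecutionNeeded(true).
        // Richer elements read the parser here, as FieldDataFieldsParseElement does below.
    }

    @Override
    protected FetchSubPhase.ContextFactory<FetchSubPhaseContext> getContextFactory() {
        // hypothetical factory from the earlier sketch
        return MyFetchSubPhaseFactoryExample.CONTEXT_FACTORY;
    }
}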

View File

@ -19,13 +19,14 @@
package org.elasticsearch.search.fetch.fielddata;
import com.google.common.collect.Lists;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import java.util.List;
/**
* All the required context to pull a field from the field data cache.
*/
public class FieldDataFieldsContext {
public class FieldDataFieldsContext extends FetchSubPhaseContext {
public static class FieldDataField {
private final String name;

View File

@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
@ -43,6 +44,20 @@ import java.util.Map;
*/
public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
public static final String[] NAMES = {"fielddata_fields", "fielddataFields"};
public static final ContextFactory<FieldDataFieldsContext> CONTEXT_FACTORY = new ContextFactory<FieldDataFieldsContext>() {
@Override
public String getName() {
return NAMES[0];
}
@Override
public FieldDataFieldsContext newContextInstance() {
return new FieldDataFieldsContext();
}
};
@Inject
public FieldDataFieldsFetchSubPhase() {
}
@ -66,12 +81,12 @@ public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
@Override
public boolean hitExecutionNeeded(SearchContext context) {
return context.hasFieldDataFields();
return context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded();
}
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
for (FieldDataFieldsContext.FieldDataField field : context.fieldDataFields().fields()) {
for (FieldDataFieldsContext.FieldDataField field : context.getFetchSubPhaseContext(CONTEXT_FACTORY).fields()) {
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<String, SearchHitField>(2));
}

View File

@ -20,12 +20,15 @@ package org.elasticsearch.search.fetch.fielddata;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.FetchSubPhaseParseElement;
import org.elasticsearch.search.internal.SearchContext;
/**
* Parses field name values from the {@code fielddata_fields} parameter in a
* search request.
*
* <p/>
* <pre>
* {
* "query": {...},
@ -33,20 +36,26 @@ import org.elasticsearch.search.internal.SearchContext;
* }
* </pre>
*/
public class FieldDataFieldsParseElement implements SearchParseElement {
public class FieldDataFieldsParseElement extends FetchSubPhaseParseElement<FieldDataFieldsContext> {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
protected void innerParse(XContentParser parser, FieldDataFieldsContext fieldDataFieldsContext, SearchContext searchContext) throws Exception {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_ARRAY) {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
String fieldName = parser.text();
context.fieldDataFields().add(new FieldDataFieldsContext.FieldDataField(fieldName));
fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(fieldName));
}
} else if (token == XContentParser.Token.VALUE_STRING) {
String fieldName = parser.text();
context.fieldDataFields().add(new FieldDataFieldsContext.FieldDataField(fieldName));
} else {
fieldDataFieldsContext.add(new FieldDataFieldsContext.FieldDataField(fieldName));
} else {
throw new IllegalStateException("Expected either a VALUE_STRING or an START_ARRAY but got " + token);
}
}
@Override
protected FetchSubPhase.ContextFactory getContextFactory() {
return FieldDataFieldsFetchSubPhase.CONTEXT_FACTORY;
}
}

View File

@ -53,7 +53,8 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -99,7 +100,6 @@ public class DefaultSearchContext extends SearchContext {
private boolean explain;
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
private FieldDataFieldsContext fieldDataFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private int from = -1;
@ -126,11 +126,13 @@ public class DefaultSearchContext extends SearchContext {
private volatile long lastAccessTime = -1;
private InnerHitsContext innerHitsContext;
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher,
TimeValue timeout
) {
super(parseFieldMatcher);
this.id = id;
@ -302,6 +304,16 @@ public class DefaultSearchContext extends SearchContext {
return this;
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
String subPhaseName = contextFactory.getName();
if (subPhaseContexts.get(subPhaseName) == null) {
subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
}
return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
}
@Override
public SearchContextHighlight highlight() {
return highlight;
@ -338,19 +350,6 @@ public class DefaultSearchContext extends SearchContext {
this.rescore.add(rescore);
}
@Override
public boolean hasFieldDataFields() {
return fieldDataFields != null;
}
@Override
public FieldDataFieldsContext fieldDataFields() {
if (fieldDataFields == null) {
fieldDataFields = new FieldDataFieldsContext();
}
return this.fieldDataFields;
}
@Override
public boolean hasScriptFields() {
return scriptFields != null;

View File

@ -45,7 +45,8 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -209,16 +210,6 @@ public abstract class FilteredSearchContext extends SearchContext {
in.addRescore(rescore);
}
@Override
public boolean hasFieldDataFields() {
return in.hasFieldDataFields();
}
@Override
public FieldDataFieldsContext fieldDataFields() {
return in.fieldDataFields();
}
@Override
public boolean hasScriptFields() {
return in.hasScriptFields();
@ -628,4 +619,9 @@ public abstract class FilteredSearchContext extends SearchContext {
public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
in.copyContextAndHeadersFrom(other);
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
return in.getFetchSubPhaseContext(contextFactory);
}
}

View File

@ -50,7 +50,8 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -163,6 +164,8 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public abstract SearchContext aggregations(SearchContextAggregations aggregations);
public abstract <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory);
public abstract SearchContextHighlight highlight();
public abstract void highlight(SearchContextHighlight highlight);
@ -182,10 +185,6 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public abstract void addRescore(RescoreSearchContext rescore);
public abstract boolean hasFieldDataFields();
public abstract FieldDataFieldsContext fieldDataFields();
public abstract boolean hasScriptFields();
public abstract ScriptFieldsContext scriptFields();

View File

@ -30,7 +30,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -62,7 +61,6 @@ public class SubSearchContext extends FilteredSearchContext {
private int docsIdsToLoadSize;
private List<String> fieldNames;
private FieldDataFieldsContext fieldDataFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private SearchContextHighlight highlight;
@ -132,19 +130,6 @@ public class SubSearchContext extends FilteredSearchContext {
throw new UnsupportedOperationException("Not supported");
}
@Override
public boolean hasFieldDataFields() {
return fieldDataFields != null;
}
@Override
public FieldDataFieldsContext fieldDataFields() {
if (fieldDataFields == null) {
fieldDataFields = new FieldDataFieldsContext();
}
return this.fieldDataFields;
}
@Override
public boolean hasScriptFields() {
return scriptFields != null;

View File

@ -0,0 +1,218 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
/**
*
*/
@ClusterScope(scope = Scope.SUITE, numDataNodes = 1)
public class FetchSubPhasePluginTests extends ElasticsearchIntegrationTest {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.put("plugin.types", FetchTermVectorsPlugin.class.getName())
.build();
}
@Test
public void testPlugin() throws Exception {
client().admin()
.indices()
.prepareCreate("test")
.addMapping(
"type1",
jsonBuilder()
.startObject().startObject("type1")
.startObject("properties")
.startObject("test")
.field("type", "string").field("term_vector", "yes")
.endObject()
.endObject()
.endObject().endObject()).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet();
client().index(
indexRequest("test").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "I am sam i am").endObject())).actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
String searchSource = jsonBuilder().startObject()
.field("term_vectors_fetch", "test")
.endObject().string();
SearchResponse response = client().prepareSearch().setSource(searchSource).get();
assertSearchResponse(response);
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"), equalTo(2));
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("am"), equalTo(2));
assertThat(((Map<String, Integer>) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("sam"), equalTo(1));
}
public static class FetchTermVectorsPlugin extends AbstractPlugin {
@Override
public String name() {
return "fetch-term-vectors";
}
@Override
public String description() {
return "fetch plugin to test if the plugin mechanism works";
}
public void onModule(FetchSubPhaseModule fetchSubPhaseModule) {
fetchSubPhaseModule.registerFetchSubPhase(TermVectorsFetchSubPhase.class);
}
}
public static class TermVectorsFetchSubPhase implements FetchSubPhase {
public static final ContextFactory<TermVectorsFetchContext> CONTEXT_FACTORY = new ContextFactory<TermVectorsFetchContext>() {
@Override
public String getName() {
return NAMES[0];
}
@Override
public TermVectorsFetchContext newContextInstance() {
return new TermVectorsFetchContext();
}
};
public TermVectorsFetchSubPhase() {
}
public static final String[] NAMES = {"term_vectors_fetch"};
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("term_vectors_fetch", new TermVectorsFetchParseElement());
}
@Override
public boolean hitsExecutionNeeded(SearchContext context) {
return false;
}
@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
}
@Override
public boolean hitExecutionNeeded(SearchContext context) {
return context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded();
}
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
String field = context.getFetchSubPhaseContext(CONTEXT_FACTORY).getField();
if (hitContext.hit().fieldsOrNull() == null) {
hitContext.hit().fields(new HashMap<String, SearchHitField>());
}
SearchHitField hitField = hitContext.hit().fields().get(NAMES[0]);
if (hitField == null) {
hitField = new InternalSearchHitField(NAMES[0], new ArrayList<>(1));
hitContext.hit().fields().put(NAMES[0], hitField);
}
TermVectorsResponse termVector = context.indexShard().termVectorsService().getTermVectors(new TermVectorsRequest(context.indexShard().indexService().index().getName(), hitContext.hit().type(), hitContext.hit().id()), context.indexShard().indexService().index().getName());
try {
Map<String, Integer> tv = new HashMap<>();
TermsEnum terms = termVector.getFields().terms(field).iterator();
BytesRef term;
while ((term = terms.next()) != null) {
tv.put(term.utf8ToString(), terms.postings(null, null, PostingsEnum.ALL).freq());
}
hitField.values().add(tv);
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static class TermVectorsFetchParseElement extends FetchSubPhaseParseElement<TermVectorsFetchContext> {
@Override
protected void innerParse(XContentParser parser, TermVectorsFetchContext termVectorsFetchContext, SearchContext searchContext) throws Exception {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) {
String fieldName = parser.text();
termVectorsFetchContext.setField(fieldName);
} else {
throw new IllegalStateException("Expected a VALUE_STRING but got " + token);
}
}
@Override
protected FetchSubPhase.ContextFactory getContextFactory() {
return TermVectorsFetchSubPhase.CONTEXT_FACTORY;
}
}
public static class TermVectorsFetchContext extends FetchSubPhaseContext {
private String field = null;
public TermVectorsFetchContext() {
}
public void setField(String field) {
this.field = field;
}
public String getField() {
return field;
}
}
}

View File

@ -48,7 +48,8 @@ import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -63,9 +64,7 @@ import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.*;
public class TestSearchContext extends SearchContext {
@ -83,6 +82,7 @@ public class TestSearchContext extends SearchContext {
private SearchContextAggregations aggregations;
private final long originNanoTime = System.nanoTime();
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
public TestSearchContext(ThreadPool threadPool,PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService, QueryCache filterCache, IndexFieldDataService indexFieldDataService) {
super(ParseFieldMatcher.STRICT);
@ -203,6 +203,15 @@ public class TestSearchContext extends SearchContext {
return this;
}
@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
String subPhaseName = contextFactory.getName();
if (subPhaseContexts.get(subPhaseName) == null) {
subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
}
return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
}
@Override
public SearchContextHighlight highlight() {
return null;
@ -230,16 +239,6 @@ public class TestSearchContext extends SearchContext {
public void addRescore(RescoreSearchContext rescore) {
}
@Override
public boolean hasFieldDataFields() {
return false;
}
@Override
public FieldDataFieldsContext fieldDataFields() {
return null;
}
@Override
public boolean hasScriptFields() {
return false;