Cleanup query parsing and remove IndexQueryParserService

IndexQueryParserService is only a factory for QueryShardContext instances,
which are not even bound to a shard. The service only forwards dependencies and even
references node level services directly, which makes separating dependencies at the shard,
index and node level hard. This commit removes the service entirely, folds the creation
of QueryShardContext into IndexShard, which is its logical place, and replaces the
ClusterService dependency needed for index name matching during query parsing with a simple
predicate interface on IndexSettings.
Simon Willnauer 2015-11-02 20:36:10 +01:00
parent b56bbf62dd
commit 4176964358
80 changed files with 581 additions and 639 deletions
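
As context for the diff below, here is a minimal, illustrative sketch (not code from the commit) of the idea behind the predicate: query parsing asks a Predicate<String> supplied to IndexSettings whether an expression refers to this index, instead of going through ClusterService. The class name and the trivial wildcard matcher are made up; in the actual change the node level supplies a predicate that delegates to IndexNameExpressionResolver.matchesIndex(...) and IndexSettings exposes it as isMatchIndexName(...), as shown in the hunks further down.

import java.util.function.Predicate;

// Illustrative sketch only, not part of the diff. The stand-in matcher below fakes
// what the node level would supply via IndexNameExpressionResolver.matchesIndex(...).
public class IndexNameMatcherSketch {
    public static void main(String[] args) {
        final String indexName = "logs-2015-11"; // hypothetical index name

        // Stand-in for the predicate handed to the IndexSettings constructor.
        Predicate<String> indexNameMatcher = expression ->
                expression.equals(indexName)
                        || (expression.endsWith("*")
                            && indexName.startsWith(expression.substring(0, expression.length() - 1)));

        // Query parsing code only consults the predicate; no ClusterService is involved.
        System.out.println(indexNameMatcher.test("logs-*"));    // true
        System.out.println(indexNameMatcher.test("metrics-*")); // false
    }
}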

View File

@@ -119,7 +119,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
if (field == null) {
if (indexService != null) {
- field = indexService.queryParserService().defaultField();
+ field = indexService.getIndexSettings().getDefaultField();
} else {
field = AllFieldMapper.NAME;
}

View File

@@ -42,7 +42,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
- import org.elasticsearch.index.query.IndexQueryParserService;
+ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
@@ -162,8 +162,8 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
@Override
protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
- IndexQueryParserService queryParserService = indexService.queryParserService();
IndexShard indexShard = indexService.getShard(request.shardId().id());
+ QueryShardContext queryParserService = indexShard.getQueryShardContext();
boolean valid;
String explanation = null;

View File

@@ -121,7 +121,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
SearchContext.setCurrent(context);
try {
- context.parsedQuery(indexService.queryParserService().toQuery(request.query()));
+ context.parsedQuery(indexShard.getQueryShardContext().toQuery(request.query()));
context.preProcess();
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);

View File

@@ -142,8 +142,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("suggest content missing");
}
- final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
- indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id(), request);
+ final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.shardId().getIndex(), request.shardId().id(), request);
final Suggest result = suggestPhase.execute(context, searcher.searcher());
return new ShardSuggestResponse(request.shardId(), result);
}

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
- import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.InvalidAliasNameException;
@@ -113,14 +112,14 @@ public class AliasValidator extends AbstractComponent {
/**
* Validates an alias filter by parsing it using the
- * provided {@link org.elasticsearch.index.query.IndexQueryParserService}
+ * provided {@link org.elasticsearch.index.query.QueryShardContext}
* @throws IllegalArgumentException if the filter is not valid
*/
- public void validateAliasFilter(String alias, String filter, IndexQueryParserService indexQueryParserService) {
+ public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) {
- assert indexQueryParserService != null;
+ assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
- validateAliasFilter(parser, indexQueryParserService);
+ validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
}
@@ -128,26 +127,25 @@ public class AliasValidator extends AbstractComponent {
/**
* Validates an alias filter by parsing it using the
- * provided {@link org.elasticsearch.index.query.IndexQueryParserService}
+ * provided {@link org.elasticsearch.index.query.QueryShardContext}
* @throws IllegalArgumentException if the filter is not valid
*/
- public void validateAliasFilter(String alias, byte[] filter, IndexQueryParserService indexQueryParserService) {
+ public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) {
- assert indexQueryParserService != null;
+ assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
- validateAliasFilter(parser, indexQueryParserService);
+ validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
}
}
- private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
+ private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
- QueryShardContext context = indexQueryParserService.getShardContext();
try {
- context.reset(parser);
+ queryShardContext.reset(parser);
- context.parseContext().parseInnerQueryBuilder().toFilter(context);
+ queryShardContext.parseContext().parseInnerQueryBuilder().toFilter(queryShardContext);
} finally {
- context.reset(null);
+ queryShardContext.reset(null);
parser.close();
}
}

View File

@@ -806,4 +806,17 @@ public class IndexNameExpressionResolver extends AbstractComponent {
}
}
+ /**
+ * Returns <code>true</code> iff the given expression resolves to the given index name otherwise <code>false</code>
+ */
+ public final boolean matchesIndex(String indexName, String expression, ClusterState state) {
+ final String[] concreteIndices = concreteIndices(state, IndicesOptions.lenientExpandOpen(), expression);
+ for (String index : concreteIndices) {
+ if (Regex.simpleMatch(index, indexName)) {
+ return true;
+ }
+ }
+ return indexName.equals(expression);
+ }
}

View File

@@ -26,7 +26,6 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
- import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
@@ -41,48 +40,37 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
- import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
- import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
- import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
- import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
- import org.elasticsearch.index.query.IndexQueryParserService;
+ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexCreationException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.script.ScriptService;
- import org.elasticsearch.threadpool.ThreadPool;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
- import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
- import java.nio.charset.StandardCharsets;
- import java.nio.file.DirectoryStream;
- import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
- import java.util.concurrent.Semaphore;
- import java.util.concurrent.TimeUnit;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
@@ -334,15 +322,15 @@ public class MetaDataCreateIndexService extends AbstractComponent {
}
}
- IndexQueryParserService indexQueryParserService = indexService.queryParserService();
+ QueryShardContext queryShardContext = indexService.getQueryShardContext();
for (Alias alias : request.aliases()) {
if (Strings.hasLength(alias.filter())) {
- aliasValidator.validateAliasFilter(alias.name(), alias.filter(), indexQueryParserService);
+ aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
}
}
for (AliasMetaData aliasMetaData : templatesAliases.values()) {
if (aliasMetaData.filter() != null) {
- aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), indexQueryParserService);
+ aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext);
}
}

View File

@@ -112,7 +112,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
indices.put(indexMetaData.getIndex(), indexService);
}
- aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.queryParserService());
+ aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext());
}
AliasMetaData newAliasMd = AliasMetaData.newAliasMetaDataBuilder(
aliasAction.alias())

View File

@@ -20,23 +20,22 @@
package org.elasticsearch.common;
import org.elasticsearch.common.settings.Settings;
- import org.elasticsearch.index.query.IndexQueryParserService;
import java.util.EnumSet;
/**
* Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField}
- * against a field name and throw deprecation exception depending on the current value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
+ * against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting.
*/
public class ParseFieldMatcher {
+ public static final String PARSE_STRICT = "index.query.parse.strict";
public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS);
public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS);
private final EnumSet<ParseField.Flag> parseFlags;
public ParseFieldMatcher(Settings settings) {
- if (settings.getAsBoolean(IndexQueryParserService.PARSE_STRICT, false)) {
+ if (settings.getAsBoolean(PARSE_STRICT, false)) {
this.parseFlags = EnumSet.of(ParseField.Flag.STRICT);
} else {
this.parseFlags = ParseField.EMPTY_FLAGS;
@@ -49,7 +48,7 @@ public class ParseFieldMatcher {
/**
* Matches a {@link ParseField} against a field name, and throws deprecation exception depending on the current
- * value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
+ * value of the {@link #PARSE_STRICT} setting.
* @param fieldName the field name found in the request while parsing
* @param parseField the parse field that we are looking for
* @throws IllegalArgumentException whenever we are in strict mode and the request contained a deprecated field

View File

@@ -222,8 +222,9 @@ public final class IndexModule extends AbstractModule {
@Override
protected void configure() {
+ final IndexSettings settings = indexSettings.newWithListener(settingsConsumers);
try {
- bind(AnalysisService.class).toInstance(analysisRegistry.build(indexSettings));
+ bind(AnalysisService.class).toInstance(analysisRegistry.build(settings));
} catch (IOException e) {
throw new ElasticsearchException("can't create analysis service", e);
}
@@ -234,7 +235,6 @@ public final class IndexModule extends AbstractModule {
bind(IndexServicesProvider.class).asEagerSingleton();
bind(MapperService.class).asEagerSingleton();
bind(IndexFieldDataService.class).asEagerSingleton();
- final IndexSettings settings = new IndexSettings(indexSettings.getIndexMetaData(), indexSettings.getNodeSettings(), settingsConsumers);
bind(IndexSettings.class).toInstance(settings);
final String storeType = settings.getSettings().get(STORE_TYPE);

View File

@@ -43,8 +43,8 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
- import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
+ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.*;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
@@ -164,10 +164,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
return indexServicesProvider.getMapperService();
}
- public IndexQueryParserService queryParserService() {
- return indexServicesProvider.getQueryParserService();
- }
public SimilarityService similarityService() {
return indexServicesProvider.getSimilarityService();
}
@@ -362,6 +358,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
return indexSettings;
}
+ public QueryShardContext getQueryShardContext() {
+ return new QueryShardContext(indexSettings, indexServicesProvider.getClient(), bitsetFilterCache(), indexServicesProvider.getIndexFieldDataService(), mapperService(), similarityService(), indexServicesProvider.getScriptService(), indexServicesProvider.getIndicesQueriesRegistry());
+ }
private class StoreCloseListener implements Store.OnClose {
private final ShardId shardId;
private final boolean ownsShard;
@@ -452,11 +452,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
* The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
* Returns <tt>null</tt> if no filtering is required.</p>
*/
- public Query aliasFilter(String... aliasNames) {
+ public Query aliasFilter(QueryShardContext context, String... aliasNames) {
if (aliasNames == null || aliasNames.length == 0) {
return null;
}
- final IndexQueryParserService indexQueryParser = queryParserService();
final ImmutableOpenMap<String, AliasMetaData> aliases = indexSettings.getIndexMetaData().getAliases();
if (aliasNames.length == 1) {
AliasMetaData alias = aliases.get(aliasNames[0]);
@@ -464,7 +463,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
}
- return parse(alias, indexQueryParser);
+ return parse(alias, context);
} else {
// we need to bench here a bit, to see maybe it makes sense to use OrFilter
BooleanQuery.Builder combined = new BooleanQuery.Builder();
@@ -472,9 +471,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
AliasMetaData alias = aliases.get(aliasName);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
- throw new InvalidAliasNameException(indexQueryParser.index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
+ throw new InvalidAliasNameException(indexSettings.getIndex(), aliasNames[0], "Unknown alias name was passed to alias Filter");
}
- Query parsedFilter = parse(alias, indexQueryParser);
+ Query parsedFilter = parse(alias, context);
if (parsedFilter != null) {
combined.add(parsedFilter, BooleanClause.Occur.SHOULD);
} else {
@@ -486,18 +485,18 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
}
}
- private Query parse(AliasMetaData alias, IndexQueryParserService indexQueryParser) {
+ private Query parse(AliasMetaData alias, QueryShardContext parseContext) {
if (alias.filter() == null) {
return null;
}
try {
byte[] filterSource = alias.filter().uncompressed();
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
- ParsedQuery parsedFilter = indexQueryParser.parseInnerFilter(parser);
+ ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser);
return parsedFilter == null ? null : parsedFilter.query();
}
} catch (IOException ex) {
- throw new AliasFilterParsingException(indexQueryParser.index(), alias.getAlias(), "Invalid alias filter", ex);
+ throw new AliasFilterParsingException(parseContext.index(), alias.getAlias(), "Invalid alias filter", ex);
}
}

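A hedged usage sketch (not code from this commit): a call site that previously went through indexService.queryParserService() now asks IndexService for a fresh QueryShardContext and passes it along explicitly. The helper class, method name and alias value below are made up; getQueryShardContext() and aliasFilter(QueryShardContext, String...) are the methods shown in the hunks above.

import org.apache.lucene.search.Query;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryShardContext;

// Hypothetical caller, for illustration only.
class AliasFilterUsageSketch {
    static Query filterFor(IndexService indexService, String alias) {
        QueryShardContext context = indexService.getQueryShardContext(); // fresh context per use
        return indexService.aliasFilter(context, alias);                 // context is now an explicit argument
    }
}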
View File

@@ -19,6 +19,7 @@
package org.elasticsearch.index;
+ import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.BigArrays;
@@ -27,14 +28,14 @@ import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
- import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.shard.IndexEventListener;
- import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.memory.IndexingMemoryController;
+ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
/**
@@ -46,7 +47,6 @@ public final class IndexServicesProvider {
private final ThreadPool threadPool;
private final MapperService mapperService;
- private final IndexQueryParserService queryParserService;
private final IndexCache indexCache;
private final IndicesQueryCache indicesQueryCache;
private final CodecService codecService;
@@ -58,13 +58,15 @@ public final class IndexServicesProvider {
private final BigArrays bigArrays;
private final IndexingMemoryController indexingMemoryController;
private final IndexEventListener listener;
+ private final Client client;
+ private final IndicesQueriesRegistry indicesQueriesRegistry;
+ private final ScriptService scriptService;
@Inject
- public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController) {
+ public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController, Client client, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry) {
this.listener = listener;
this.threadPool = threadPool;
this.mapperService = mapperService;
- this.queryParserService = queryParserService;
this.indexCache = indexCache;
this.indicesQueryCache = indicesQueryCache;
this.codecService = codecService;
@@ -75,6 +77,9 @@ public final class IndexServicesProvider {
this.factory = factory;
this.bigArrays = bigArrays;
this.indexingMemoryController = indexingMemoryController;
+ this.client = client;
+ this.indicesQueriesRegistry = indicesQueriesRegistry;
+ this.scriptService = scriptService;
}
public IndexEventListener getIndexEventListener() {
@@ -88,10 +93,6 @@ public final class IndexServicesProvider {
return mapperService;
}
- public IndexQueryParserService getQueryParserService() {
- return queryParserService;
- }
public IndexCache getIndexCache() {
return indexCache;
}
@@ -126,6 +127,18 @@ public final class IndexServicesProvider {
public BigArrays getBigArrays() { return bigArrays; }
+ public Client getClient() {
+ return client;
+ }
+ public IndicesQueriesRegistry getIndicesQueriesRegistry() {
+ return indicesQueriesRegistry;
+ }
+ public ScriptService getScriptService() {
+ return scriptService;
+ }
public IndexingMemoryController getIndexingMemoryController() {
return indexingMemoryController;
}

View File

@@ -20,16 +20,20 @@ package org.elasticsearch.index;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
+ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
+ import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
+ import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
+ import java.util.function.Predicate;
/**
* This class encapsulates all index level settings and handles settings updates.
@@ -39,6 +43,12 @@ import java.util.function.Consumer;
* be called for each settings update.
*/
public final class IndexSettings {
+ public static final String DEFAULT_FIELD = "index.query.default_field";
+ public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
+ public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
+ public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
+ public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
private final String uuid;
private final List<Consumer<Settings>> updateListeners;
private final Index index;
@@ -48,10 +58,51 @@ public final class IndexSettings {
private final Settings nodeSettings;
private final int numberOfShards;
private final boolean isShadowReplicaIndex;
+ private final ParseFieldMatcher parseFieldMatcher;
// volatile fields are updated via #updateIndexMetaData(IndexMetaData) under lock
private volatile Settings settings;
private volatile IndexMetaData indexMetaData;
+ private final String defaultField;
+ private final boolean queryStringLenient;
+ private final boolean queryStringAnalyzeWildcard;
+ private final boolean queryStringAllowLeadingWildcard;
+ private final boolean defaultAllowUnmappedFields;
+ private final Predicate<String> indexNameMatcher;
+ /**
+ * Returns the default search field for this index.
+ */
+ public String getDefaultField() {
+ return defaultField;
+ }
+ /**
+ * Returns <code>true</code> if query string parsing should be lenient. The default is <code>false</code>
+ */
+ public boolean isQueryStringLenient() {
+ return queryStringLenient;
+ }
+ /**
+ * Returns <code>true</code> if the query string should analyze wildcards. The default is <code>false</code>
+ */
+ public boolean isQueryStringAnalyzeWildcard() {
+ return queryStringAnalyzeWildcard;
+ }
+ /**
+ * Returns <code>true</code> if the query string parser should allow leading wildcards. The default is <code>true</code>
+ */
+ public boolean isQueryStringAllowLeadingWildcard() {
+ return queryStringAllowLeadingWildcard;
+ }
+ /**
+ * Returns <code>true</code> if queries should be lenient about unmapped fields. The default is <code>true</code>
+ */
+ public boolean isDefaultAllowUnmappedFields() {
+ return defaultAllowUnmappedFields;
+ }
/**
* Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
@@ -62,6 +113,19 @@
* @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
*/
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners) {
+ this(indexMetaData, nodeSettings, updateListeners, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()));
+ }
+ /**
+ * Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
+ * while index level settings will overwrite node settings.
+ *
+ * @param indexMetaData the index metadata this settings object is associated with
+ * @param nodeSettings the nodes settings this index is allocated on.
+ * @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
+ * @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias
+ */
+ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners, final Predicate<String> indexNameMatcher) {
this.nodeSettings = nodeSettings;
this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build();
this.updateListeners = Collections.unmodifiableList(new ArrayList<>(updateListeners));
@@ -73,6 +137,25 @@
this.indexMetaData = indexMetaData;
numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings);
+ this.defaultField = settings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
+ this.queryStringLenient = settings.getAsBoolean(QUERY_STRING_LENIENT, false);
+ this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
+ this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
+ this.parseFieldMatcher = new ParseFieldMatcher(settings);
+ this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true);
+ this.indexNameMatcher = indexNameMatcher;
+ assert indexNameMatcher.test(indexMetaData.getIndex());
+ }
+ /**
+ * Creates a new {@link IndexSettings} instance adding the given listeners to the settings
+ */
+ IndexSettings newWithListener(final Collection<Consumer<Settings>> updateListeners) {
+ ArrayList<Consumer<Settings>> newUpdateListeners = new ArrayList<>(updateListeners);
+ newUpdateListeners.addAll(this.updateListeners);
+ return new IndexSettings(indexMetaData, nodeSettings, newUpdateListeners, indexNameMatcher);
}
/**
@@ -138,9 +221,7 @@
* Returns <code>true</code> iff this index uses shadow replicas.
* @see IndexMetaData#isIndexUsingShadowReplicas(Settings)
*/
- public boolean isShadowReplicaIndex() {
- return isShadowReplicaIndex;
- }
+ public boolean isShadowReplicaIndex() { return isShadowReplicaIndex; }
/**
* Returns the node settings. The settings retured from {@link #getSettings()} are a merged version of the
@@ -150,6 +231,14 @@
return nodeSettings;
}
+ /**
+ * Returns a {@link ParseFieldMatcher} for this index.
+ */
+ public ParseFieldMatcher getParseFieldMatcher() { return parseFieldMatcher; }
+ public boolean isMatchIndexName(String expression) {
+ return indexNameMatcher.test(expression);
+ }
/**
* Updates the settings and index metadata and notifies all registered settings consumers with the new settings iff at least one setting has changed.
*

View File

@@ -220,8 +220,8 @@ public class ParentFieldMapper extends MetadataFieldMapper {
return super.termsQuery(values, context);
}
- List<String> types = new ArrayList<>(context.mapperService().types().size());
+ List<String> types = new ArrayList<>(context.getMapperService().types().size());
- for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
+ for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}

View File

@@ -44,7 +44,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentTypeListener;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
- import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
@@ -69,7 +68,6 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
public final String MAP_UNMAPPED_FIELDS_AS_STRING = "index.percolator.map_unmapped_fields_as_string";
// This is a shard level service, but these below are index level service:
- private final IndexQueryParserService queryParserService;
private final MapperService mapperService;
private final IndexFieldDataService indexFieldDataService;
@@ -79,18 +77,20 @@
private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener();
private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false);
+ private final QueryShardContext queryShardContext;
private boolean mapUnmappedFieldsAsString;
private final MeanMetric percolateMetric = new MeanMetric();
private final CounterMetric currentMetric = new CounterMetric();
private final CounterMetric numberOfQueries = new CounterMetric();
- public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, IndexQueryParserService queryParserService,
+ public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings,
ShardIndexingService indexingService, MapperService mapperService,
+ QueryShardContext queryShardContext,
IndexFieldDataService indexFieldDataService) {
super(shardId, indexSettings);
- this.queryParserService = queryParserService;
this.mapperService = mapperService;
this.indexingService = indexingService;
+ this.queryShardContext = queryShardContext;
this.indexFieldDataService = indexFieldDataService;
this.mapUnmappedFieldsAsString = this.indexSettings.getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false);
mapperService.addTypeListener(percolateTypeListener);
@@ -179,7 +179,7 @@
if (type != null) {
previousTypes = QueryShardContext.setTypesWithPrevious(type);
}
- QueryShardContext context = queryParserService.getShardContext();
+ QueryShardContext context = queryShardContext.clone();
try {
context.reset(parser);
// This means that fields in the query need to exist in the mapping prior to registering this query
@@ -196,7 +196,7 @@
// as an analyzed string.
context.setAllowUnmappedFields(false);
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
- return queryParserService.parseInnerQuery(context);
+ return context.parseInnerQuery();
} catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
} finally {

View File

@@ -245,10 +245,10 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
if (fieldType != null) {
analyzerObj = context.getSearchAnalyzer(fieldType);
} else {
- analyzerObj = context.mapperService().searchAnalyzer();
+ analyzerObj = context.getMapperService().searchAnalyzer();
}
} else {
- analyzerObj = context.mapperService().analysisService().analyzer(analyzer);
+ analyzerObj = context.getMapperService().analysisService().analyzer(analyzer);
if (analyzerObj == null) {
throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
}

View File

@@ -72,7 +72,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
}
public static Query newFilter(QueryShardContext context, String fieldPattern) {
- final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME);
+ final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery();

View File

@@ -217,7 +217,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
}
innerQuery.setBoost(boost);
- DocumentMapper childDocMapper = context.mapperService().documentMapper(type);
+ DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
if (childDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
}
@@ -231,10 +231,10 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
- InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
+ InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
- InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
+ InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), childDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
@@ -242,7 +242,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
}
String parentType = parentFieldMapper.type();
- DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
+ DocumentMapper parentDocMapper = context.getMapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type ["
+ parentType + "]");

View File

@@ -130,7 +130,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
return null;
}
innerQuery.setBoost(boost);
- DocumentMapper parentDocMapper = context.mapperService().documentMapper(type);
+ DocumentMapper parentDocMapper = context.getMapperService().documentMapper(type);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[has_parent] query configured 'parent_type' [" + type
+ "] is not a valid type");
@@ -142,10 +142,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
- InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
+ InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
- InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), parentDocMapper);
+ InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), parentDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}
@@ -155,10 +155,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
Set<String> parentTypes = new HashSet<>(5);
parentTypes.add(parentDocMapper.type());
ParentChildIndexFieldData parentChildIndexFieldData = null;
- for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
+ for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
- DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type());
+ DocumentMapper parentTypeDocumentMapper = context.getMapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
@@ -172,14 +172,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
Query parentTypeQuery = null;
if (parentTypes.size() == 1) {
- DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next());
+ DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypes.iterator().next());
if (documentMapper != null) {
parentTypeQuery = documentMapper.typeFilter();
}
} else {
BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
for (String parentTypeStr : parentTypes) {
- DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
+ DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypeStr);
if (documentMapper != null) {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}

View File

@@ -115,7 +115,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
if (types.length == 0) {
typesForQuery = context.queryTypes();
} else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
- typesForQuery = context.mapperService().types();
+ typesForQuery = context.getMapperService().types();
} else {
typesForQuery = new HashSet<>();
Collections.addAll(typesForQuery, types);

View File

@@ -1,272 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
import java.io.IOException;
public class IndexQueryParserService extends AbstractIndexComponent {
public static final String DEFAULT_FIELD = "index.query.default_field";
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
public static final String PARSE_STRICT = "index.query.parse.strict";
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;
private CloseableThreadLocal<QueryShardContext> cache = new CloseableThreadLocal<QueryShardContext>() {
@Override
protected QueryShardContext initialValue() {
return new QueryShardContext(IndexQueryParserService.this);
}
};
final AnalysisService analysisService;
final ScriptService scriptService;
final MapperService mapperService;
final SimilarityService similarityService;
final IndexCache indexCache;
protected IndexFieldDataService fieldDataService;
final ClusterService clusterService;
final IndexNameExpressionResolver indexNameExpressionResolver;
final BitsetFilterCache bitsetFilterCache;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final String defaultField;
private final boolean queryStringLenient;
private final boolean queryStringAnalyzeWildcard;
private final boolean queryStringAllowLeadingWildcard;
private final ParseFieldMatcher parseFieldMatcher;
private final boolean defaultAllowUnmappedFields;
private final Client client;
@Inject
public IndexQueryParserService(IndexSettings indexSettings,
IndicesQueriesRegistry indicesQueriesRegistry,
ScriptService scriptService, AnalysisService analysisService,
MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService,
BitsetFilterCache bitsetFilterCache,
@Nullable SimilarityService similarityService, ClusterService clusterService,
IndexNameExpressionResolver indexNameExpressionResolver,
InnerHitsQueryParserHelper innerHitsQueryParserHelper, Client client) {
super(indexSettings);
this.scriptService = scriptService;
this.analysisService = analysisService;
this.mapperService = mapperService;
this.similarityService = similarityService;
this.indexCache = indexCache;
this.fieldDataService = fieldDataService;
this.bitsetFilterCache = bitsetFilterCache;
this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.defaultField = this.indexSettings.getSettings().get(DEFAULT_FIELD, AllFieldMapper.NAME);
this.queryStringLenient = this.indexSettings.getSettings().getAsBoolean(QUERY_STRING_LENIENT, false);
this.queryStringAnalyzeWildcard = indexSettings.getSettings().getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
this.queryStringAllowLeadingWildcard = indexSettings.getSettings().getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings.getSettings());
this.defaultAllowUnmappedFields = this.indexSettings.getSettings().getAsBoolean(ALLOW_UNMAPPED, true);
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
this.client = client;
}
public void close() {
cache.close();
}
public String defaultField() {
return this.defaultField;
}
public boolean queryStringAnalyzeWildcard() {
return this.queryStringAnalyzeWildcard;
}
public boolean queryStringAllowLeadingWildcard() {
return this.queryStringAllowLeadingWildcard;
}
public boolean queryStringLenient() {
return this.queryStringLenient;
}
public IndicesQueriesRegistry indicesQueriesRegistry() {
return indicesQueriesRegistry;
}
public ParsedQuery parse(BytesReference source) {
QueryShardContext context = cache.get();
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(context, parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(XContentParser parser) {
try {
return innerParse(cache.get(), parser);
} catch(IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
}
}
/**
* Parses an inner filter, returning null if the filter should be ignored.
*/
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
QueryShardContext context = cache.get();
context.reset(parser);
try {
context.parseFieldMatcher(parseFieldMatcher);
Query filter = context.parseContext().parseInnerQueryBuilder().toFilter(context);
if (filter == null) {
return null;
}
return new ParsedQuery(filter, context.copyNamedQueries());
} finally {
context.reset(null);
}
}
public QueryShardContext getShardContext() {
return cache.get();
}
public boolean defaultAllowUnmappedFields() {
return defaultAllowUnmappedFields;
}
/**
* @return The lowest node version in the cluster when the index was created or <code>null</code> if that was unknown
*/
public Version getIndexCreatedVersion() {
return indexSettings.getIndexVersionCreated();
}
private ParsedQuery innerParse(QueryShardContext context, XContentParser parser) throws IOException, QueryShardException {
context.reset(parser);
try {
context.parseFieldMatcher(parseFieldMatcher);
Query query = parseInnerQuery(context);
return new ParsedQuery(query, context.copyNamedQueries());
} finally {
context.reset(null);
}
}
public Query parseInnerQuery(QueryShardContext context) throws IOException {
return toQuery(context.parseContext().parseInnerQueryBuilder(), context);
}
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
QueryShardContext context = cache.get();
context.reset();
context.parseFieldMatcher(parseFieldMatcher);
try {
Query query = toQuery(queryBuilder, context);
return new ParsedQuery(query, context.copyNamedQueries());
} catch(QueryShardException | ParsingException e ) {
throw e;
} catch(Exception e) {
throw new QueryShardException(context, "failed to create query: {}", e, queryBuilder);
} finally {
context.reset();
}
}
private static Query toQuery(QueryBuilder<?> queryBuilder, QueryShardContext context) throws IOException {
Query query = queryBuilder.toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return query;
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
public boolean matchesIndices(String... indices) {
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, index().name())) {
return true;
}
}
return false;
}
public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() {
return innerHitsQueryParserHelper;
}
public Client getClient() {
return client;
}
}

View File

@ -346,7 +346,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
@Override @Override
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
// validate context specific fields // validate context specific fields
if (analyzer != null && context.analysisService().analyzer(analyzer) == null) { if (analyzer != null && context.getAnalysisService().analyzer(analyzer) == null) {
throw new QueryShardException(context, "[match] analyzer [" + analyzer + "] not found"); throw new QueryShardException(context, "[match] analyzer [" + analyzer + "] not found");
} }

View File

@ -126,7 +126,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
throw new QueryShardException(context, "missing must have either existence, or null_value, or both set to true"); throw new QueryShardException(context, "missing must have either existence, or null_value, or both set to true");
} }
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.mapperService().fullName(FieldNamesFieldMapper.NAME); final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
if (fieldNamesFieldType == null) { if (fieldNamesFieldType == null) {
// can only happen when no types exist, so no docs exist either // can only happen when no types exist, so no docs exist either
return Queries.newMatchNoDocsQuery(); return Queries.newMatchNoDocsQuery();

View File

@ -775,7 +775,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery(); MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();
// set similarity // set similarity
mltQuery.setSimilarity(context.searchSimilarity()); mltQuery.setSimilarity(context.getSearchSimilarity());
// set query parameters // set query parameters
mltQuery.setMaxQueryTerms(maxQueryTerms); mltQuery.setMaxQueryTerms(maxQueryTerms);
@ -796,9 +796,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} }
// set analyzer // set analyzer
Analyzer analyzerObj = context.analysisService().analyzer(analyzer); Analyzer analyzerObj = context.getAnalysisService().analyzer(analyzer);
if (analyzerObj == null) { if (analyzerObj == null) {
analyzerObj = context.mapperService().searchAnalyzer(); analyzerObj = context.getMapperService().searchAnalyzer();
} }
mltQuery.setAnalyzer(analyzerObj); mltQuery.setAnalyzer(analyzerObj);

View File

@ -504,7 +504,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context); MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
if (analyzer != null) { if (analyzer != null) {
if (context.analysisService().analyzer(analyzer) == null) { if (context.getAnalysisService().analyzer(analyzer) == null) {
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found"); throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
} }
multiMatchQuery.setAnalyzer(analyzer); multiMatchQuery.setAnalyzer(analyzer);
@ -539,7 +539,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
} }
} }
Map<String, Float> newFieldsBoosts = handleFieldsMatchPattern(context.mapperService(), fieldsBoosts); Map<String, Float> newFieldsBoosts = handleFieldsMatchPattern(context.getMapperService(), fieldsBoosts);
Query query = multiMatchQuery.parse(type, newFieldsBoosts, value, minimumShouldMatch); Query query = multiMatchQuery.parse(type, newFieldsBoosts, value, minimumShouldMatch);
if (query == null) { if (query == null) {

View File

@ -212,7 +212,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
if (token != XContentParser.Token.START_OBJECT) { if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]"); throw new IllegalStateException("start object expected but was: [" + token + "]");
} }
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser); InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) { if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries()); ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());

View File

@ -27,14 +27,19 @@ import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper;
@ -42,15 +47,20 @@ import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template; import org.elasticsearch.script.Template;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
@ -63,7 +73,14 @@ import static java.util.Collections.unmodifiableMap;
*/ */
public class QueryShardContext { public class QueryShardContext {
private static ThreadLocal<String[]> typesContext = new ThreadLocal<>(); private static final ThreadLocal<String[]> typesContext = new ThreadLocal<>();
private final MapperService mapperService;
private final ScriptService scriptService;
private final SimilarityService similarityService;
private final BitsetFilterCache bitsetFilterCache;
private final IndexFieldDataService indexFieldDataService;
private final IndexSettings indexSettings;
private final Client client;
public static void setTypes(String[] types) { public static void setTypes(String[] types) {
typesContext.set(types); typesContext.set(types);
@ -83,28 +100,31 @@ public class QueryShardContext {
typesContext.remove(); typesContext.remove();
} }
private final Version indexVersionCreated;
private final IndexQueryParserService indexQueryParser;
private final Map<String, Query> namedQueries = new HashMap<>(); private final Map<String, Query> namedQueries = new HashMap<>();
private final MapperQueryParser queryParser = new MapperQueryParser(this); private final MapperQueryParser queryParser = new MapperQueryParser(this);
private final IndicesQueriesRegistry indicesQueriesRegistry;
private boolean allowUnmappedFields; private boolean allowUnmappedFields;
private boolean mapUnmappedFieldAsString; private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope; private NestedScope nestedScope;
private QueryParseContext parseContext; private QueryParseContext parseContext;
boolean isFilter; // pkg private for testing
boolean isFilter; public QueryShardContext(IndexSettings indexSettings, Client client, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
final IndicesQueriesRegistry indicesQueriesRegistry) {
this.indexSettings = indexSettings;
this.scriptService = scriptService;
this.client = client;
this.similarityService = similarityService;
this.mapperService = mapperService;
this.bitsetFilterCache = bitsetFilterCache;
this.indexFieldDataService = indexFieldDataService;
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.parseContext = new QueryParseContext(indicesQueriesRegistry);
}
public QueryShardContext(IndexQueryParserService indexQueryParser) { public QueryShardContext clone() {
this.indexVersionCreated = indexQueryParser.getIndexCreatedVersion(); return new QueryShardContext(indexSettings, client, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
this.indexQueryParser = indexQueryParser;
this.parseContext = new QueryParseContext(indexQueryParser.indicesQueriesRegistry());
} }
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) { public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
@ -116,11 +136,12 @@ public class QueryShardContext {
} }
public void reset() { public void reset() {
allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields(); allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.parseFieldMatcher(ParseFieldMatcher.EMPTY); this.parseFieldMatcher(ParseFieldMatcher.EMPTY);
this.lookup = null; this.lookup = null;
this.namedQueries.clear(); this.namedQueries.clear();
this.nestedScope = new NestedScope(); this.nestedScope = new NestedScope();
this.isFilter = false;
} }
public void reset(XContentParser jp) { public void reset(XContentParser jp) {
@ -129,43 +150,43 @@ public class QueryShardContext {
} }
public Index index() { public Index index() {
return this.indexQueryParser.index(); return this.mapperService.getIndexSettings().getIndex();
} }
public IndexQueryParserService indexQueryParserService() { public InnerHitsSubSearchContext getInnerHitsContext(XContentParser parser) throws IOException {
return indexQueryParser; return InnerHitsQueryParserHelper.parse(parser);
} }
public AnalysisService analysisService() { public AnalysisService getAnalysisService() {
return indexQueryParser.analysisService; return mapperService.analysisService();
} }
public ScriptService scriptService() { public ScriptService getScriptService() {
return indexQueryParser.scriptService; return scriptService;
} }
public MapperService mapperService() { public MapperService getMapperService() {
return indexQueryParser.mapperService; return mapperService;
} }
public Similarity searchSimilarity() { public Similarity getSearchSimilarity() {
return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity(indexQueryParser.mapperService) : null; return similarityService != null ? similarityService.similarity(mapperService) : null;
} }
public String defaultField() { public String defaultField() {
return indexQueryParser.defaultField(); return indexSettings.getDefaultField();
} }
public boolean queryStringLenient() { public boolean queryStringLenient() {
return indexQueryParser.queryStringLenient(); return indexSettings.isQueryStringLenient();
} }
public boolean queryStringAnalyzeWildcard() { public boolean queryStringAnalyzeWildcard() {
return indexQueryParser.queryStringAnalyzeWildcard(); return indexSettings.isQueryStringAnalyzeWildcard();
} }
public boolean queryStringAllowLeadingWildcard() { public boolean queryStringAllowLeadingWildcard() {
return indexQueryParser.queryStringAllowLeadingWildcard(); return indexSettings.isQueryStringAllowLeadingWildcard();
} }
public MapperQueryParser queryParser(QueryParserSettings settings) { public MapperQueryParser queryParser(QueryParserSettings settings) {
@ -174,11 +195,11 @@ public class QueryShardContext {
} }
public BitSetProducer bitsetFilter(Query filter) { public BitSetProducer bitsetFilter(Query filter) {
return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter); return bitsetFilterCache.getBitSetProducer(filter);
} }
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) { public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
return indexQueryParser.fieldDataService.getForField(mapper); return indexFieldDataService.getForField(mapper);
} }
public void addNamedQuery(String name, Query query) { public void addNamedQuery(String name, Query query) {
@ -211,7 +232,7 @@ public class QueryShardContext {
InnerHitsContext innerHitsContext; InnerHitsContext innerHitsContext;
if (sc.innerHits() == null) { if (sc.innerHits() == null) {
innerHitsContext = new InnerHitsContext(new HashMap<String, InnerHitsContext.BaseInnerHits>()); innerHitsContext = new InnerHitsContext(new HashMap<>());
sc.innerHits(innerHitsContext); sc.innerHits(innerHitsContext);
} else { } else {
innerHitsContext = sc.innerHits(); innerHitsContext = sc.innerHits();
@ -220,15 +241,15 @@ public class QueryShardContext {
} }
public Collection<String> simpleMatchToIndexNames(String pattern) { public Collection<String> simpleMatchToIndexNames(String pattern) {
return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern); return mapperService.simpleMatchToIndexNames(pattern);
} }
public MappedFieldType fieldMapper(String name) { public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes())); return failIfFieldMappingNotFound(name, mapperService.smartNameFieldType(name, getTypes()));
} }
public ObjectMapper getObjectMapper(String name) { public ObjectMapper getObjectMapper(String name) {
return indexQueryParser.mapperService.getObjectMapper(name, getTypes()); return mapperService.getObjectMapper(name, getTypes());
} }
/** /**
@ -239,7 +260,7 @@ public class QueryShardContext {
if (fieldType.searchAnalyzer() != null) { if (fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer(); return fieldType.searchAnalyzer();
} }
return mapperService().searchAnalyzer(); return getMapperService().searchAnalyzer();
} }
/** /**
@ -250,7 +271,7 @@ public class QueryShardContext {
if (fieldType.searchQuoteAnalyzer() != null) { if (fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer(); return fieldType.searchQuoteAnalyzer();
} }
return mapperService().searchQuoteAnalyzer(); return getMapperService().searchQuoteAnalyzer();
} }
public void setAllowUnmappedFields(boolean allowUnmappedFields) { public void setAllowUnmappedFields(boolean allowUnmappedFields) {
@ -266,11 +287,9 @@ public class QueryShardContext {
return fieldMapping; return fieldMapping;
} else if (mapUnmappedFieldAsString) { } else if (mapUnmappedFieldAsString) {
StringFieldMapper.Builder builder = MapperBuilders.stringField(name); StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
// it would be better to pass the real index settings, but they are not easily accessible from here... return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
} else { } else {
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion(); Version indexCreatedVersion = indexSettings.getIndexVersionCreated();
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) { if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name [" throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name ["
+ name + "]"); + name + "]");
@ -286,10 +305,10 @@ public class QueryShardContext {
public Collection<String> queryTypes() { public Collection<String> queryTypes() {
String[] types = getTypes(); String[] types = getTypes();
if (types == null || types.length == 0) { if (types == null || types.length == 0) {
return mapperService().types(); return getMapperService().types();
} }
if (types.length == 1 && types[0].equals("_all")) { if (types.length == 1 && types[0].equals("_all")) {
return mapperService().types(); return getMapperService().types();
} }
return Arrays.asList(types); return Arrays.asList(types);
} }
@ -302,7 +321,7 @@ public class QueryShardContext {
return current.lookup(); return current.lookup();
} }
if (lookup == null) { if (lookup == null) {
lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null); lookup = new SearchLookup(getMapperService(), indexFieldDataService, null);
} }
return lookup; return lookup;
} }
@ -320,7 +339,7 @@ public class QueryShardContext {
} }
public Version indexVersionCreated() { public Version indexVersionCreated() {
return indexVersionCreated; return indexSettings.getIndexVersionCreated();
} }
public QueryParseContext parseContext() { public QueryParseContext parseContext() {
@ -328,18 +347,105 @@ public class QueryShardContext {
} }
public boolean matchesIndices(String... indices) { public boolean matchesIndices(String... indices) {
return this.indexQueryParser.matchesIndices(indices); for (String index : indices) {
if (indexSettings.isMatchIndexName(index)) {
return true;
}
}
return false;
} }
/* /*
* Executes the given template, and returns the response. * Executes the given template, and returns the response.
*/ */
public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) { public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) {
ExecutableScript executable = scriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext); ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext);
return (BytesReference) executable.run(); return (BytesReference) executable.run();
} }
public Client getClient() { public Client getClient() {
return indexQueryParser.getClient(); return client;
} }
public ParsedQuery parse(BytesReference source) {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(XContentParser parser) {
try {
return innerParse(parser);
} catch(IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
}
}
/**
* Parses an inner filter, returning null if the filter should be ignored.
*/
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
reset(parser);
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query filter = parseContext().parseInnerQueryBuilder().toFilter(this);
if (filter == null) {
return null;
}
return new ParsedQuery(filter, copyNamedQueries());
} finally {
reset(null);
}
}
private ParsedQuery innerParse(XContentParser parser) throws IOException, QueryShardException {
reset(parser);
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query query = parseInnerQuery();
return new ParsedQuery(query, copyNamedQueries());
} finally {
reset(null);
}
}
public Query parseInnerQuery() throws IOException {
return toQuery(this.parseContext().parseInnerQueryBuilder(), this);
}
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
reset();
parseFieldMatcher(indexSettings.getParseFieldMatcher());
try {
Query query = toQuery(queryBuilder, this);
return new ParsedQuery(query, copyNamedQueries());
} catch(QueryShardException | ParsingException e ) {
throw e;
} catch(Exception e) {
throw new QueryShardException(this, "failed to create query: {}", e, queryBuilder);
} finally {
this.reset();
}
}
private static Query toQuery(QueryBuilder<?> queryBuilder, QueryShardContext context) throws IOException {
Query query = queryBuilder.toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return query;
}
} }
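For reference, a minimal usage sketch of the reworked QueryShardContext: it is now constructed from its collaborators directly rather than obtained from IndexQueryParserService. The sketch assumes a caller hands in the dependencies (in this commit IndexShard assembles them from its IndexServicesProvider); only the constructor and methods shown in this diff are used, and the helper class name is illustrative.
import org.elasticsearch.client.Client;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;

class QueryShardContextSketch {
    // Builds a context from explicitly supplied dependencies and turns a
    // QueryBuilder into a ParsedQuery via the toQuery(QueryBuilder) path above.
    static ParsedQuery parseMatchAll(IndexSettings indexSettings, Client client,
                                     BitsetFilterCache bitsetFilterCache,
                                     IndexFieldDataService fieldData,
                                     MapperService mapperService,
                                     SimilarityService similarityService,
                                     ScriptService scriptService,
                                     IndicesQueriesRegistry queriesRegistry) {
        QueryShardContext context = new QueryShardContext(indexSettings, client,
                bitsetFilterCache, fieldData, mapperService, similarityService,
                scriptService, queriesRegistry);
        // matchAllQuery() is just an illustrative builder; any QueryBuilder works here.
        return context.toQuery(QueryBuilders.matchAllQuery());
    }
}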

View File

@ -661,7 +661,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
String fieldName = fieldsEntry.getKey(); String fieldName = fieldsEntry.getKey();
Float weight = fieldsEntry.getValue(); Float weight = fieldsEntry.getValue();
if (Regex.isSimpleMatchPattern(fieldName)) { if (Regex.isSimpleMatchPattern(fieldName)) {
for (String resolvedFieldName : context.mapperService().simpleMatchToIndexNames(fieldName)) { for (String resolvedFieldName : context.getMapperService().simpleMatchToIndexNames(fieldName)) {
resolvedFields.put(resolvedFieldName, weight); resolvedFields.put(resolvedFieldName, weight);
} }
} else { } else {
@ -672,16 +672,16 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
qpSettings.defaultOperator(defaultOperator.toQueryParserOperator()); qpSettings.defaultOperator(defaultOperator.toQueryParserOperator());
if (analyzer == null) { if (analyzer == null) {
qpSettings.defaultAnalyzer(context.mapperService().searchAnalyzer()); qpSettings.defaultAnalyzer(context.getMapperService().searchAnalyzer());
} else { } else {
NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(analyzer); NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(analyzer);
if (namedAnalyzer == null) { if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] analyzer [" + analyzer + "] not found"); throw new QueryShardException(context, "[query_string] analyzer [" + analyzer + "] not found");
} }
qpSettings.forceAnalyzer(namedAnalyzer); qpSettings.forceAnalyzer(namedAnalyzer);
} }
if (quoteAnalyzer != null) { if (quoteAnalyzer != null) {
NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(quoteAnalyzer); NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(quoteAnalyzer);
if (namedAnalyzer == null) { if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] quote_analyzer [" + quoteAnalyzer + "] not found"); throw new QueryShardException(context, "[query_string] quote_analyzer [" + quoteAnalyzer + "] not found");
} }
@ -689,7 +689,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
} else if (analyzer != null) { } else if (analyzer != null) {
qpSettings.forceQuoteAnalyzer(qpSettings.analyzer()); qpSettings.forceQuoteAnalyzer(qpSettings.analyzer());
} else { } else {
qpSettings.defaultQuoteAnalyzer(context.mapperService().searchQuoteAnalyzer()); qpSettings.defaultQuoteAnalyzer(context.getMapperService().searchQuoteAnalyzer());
} }
qpSettings.quoteFieldSuffix(quoteFieldSuffix); qpSettings.quoteFieldSuffix(quoteFieldSuffix);

View File

@ -69,7 +69,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
@Override @Override
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
return new ScriptQuery(script, context.scriptService(), context.lookup()); return new ScriptQuery(script, context.getScriptService(), context.lookup());
} }
static class ScriptQuery extends Query { static class ScriptQuery extends Query {

View File

@ -261,7 +261,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
} else { } else {
for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) { for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) {
if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) { if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) {
for (String fieldName : context.mapperService().simpleMatchToIndexNames(fieldEntry.getKey())) { for (String fieldName : context.getMapperService().simpleMatchToIndexNames(fieldEntry.getKey())) {
resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue()); resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue());
} }
} else { } else {
@ -273,9 +273,9 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
// Use standard analyzer by default if none specified // Use standard analyzer by default if none specified
Analyzer luceneAnalyzer; Analyzer luceneAnalyzer;
if (analyzer == null) { if (analyzer == null) {
luceneAnalyzer = context.mapperService().searchAnalyzer(); luceneAnalyzer = context.getMapperService().searchAnalyzer();
} else { } else {
luceneAnalyzer = context.analysisService().analyzer(analyzer); luceneAnalyzer = context.getAnalysisService().analyzer(analyzer);
if (luceneAnalyzer == null) { if (luceneAnalyzer == null) {
throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer
+ "] not found"); + "] not found");

View File

@ -102,7 +102,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
BytesReference querySource = context.executeQueryTemplate(template, SearchContext.current()); BytesReference querySource = context.executeQueryTemplate(template, SearchContext.current());
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService()); final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser); contextCopy.reset(qSourceParser);
QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder(); QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy); context.combineNamedQueries(contextCopy);

View File

@ -76,7 +76,7 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
Query filter; Query filter;
//LUCENE 4 UPGRADE document mapper should use bytesref as well? //LUCENE 4 UPGRADE document mapper should use bytesref as well?
DocumentMapper documentMapper = context.mapperService().documentMapper(type.utf8ToString()); DocumentMapper documentMapper = context.getMapperService().documentMapper(type.utf8ToString());
if (documentMapper == null) { if (documentMapper == null) {
filter = new TermQuery(new Term(TypeFieldMapper.NAME, type)); filter = new TermQuery(new Term(TypeFieldMapper.NAME, type));
} else { } else {

View File

@ -106,9 +106,9 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
@Override @Override
protected Query doToQuery(QueryShardContext context) throws IOException { protected Query doToQuery(QueryShardContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) { try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService()); final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser); contextCopy.reset(qSourceParser);
contextCopy.parseFieldMatcher(context.indexQueryParserService().parseFieldMatcher()); contextCopy.parseFieldMatcher(context.parseFieldMatcher());
QueryBuilder<?> result = contextCopy.parseContext().parseInnerQueryBuilder(); QueryBuilder<?> result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy); context.combineNamedQueries(contextCopy);
return result.toQuery(context); return result.toQuery(context);

View File

@ -148,7 +148,7 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder<FieldV
@Override @Override
protected ScoreFunction doToFunction(QueryShardContext context) { protected ScoreFunction doToFunction(QueryShardContext context) {
MappedFieldType fieldType = context.mapperService().smartNameFieldType(field); MappedFieldType fieldType = context.getMapperService().smartNameFieldType(field);
IndexNumericFieldData fieldData = null; IndexNumericFieldData fieldData = null;
if (fieldType == null) { if (fieldType == null) {
if(missing == null) { if(missing == null) {

View File

@ -117,7 +117,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
@Override @Override
protected ScoreFunction doToFunction(QueryShardContext context) { protected ScoreFunction doToFunction(QueryShardContext context) {
final MappedFieldType fieldType = context.mapperService().smartNameFieldType("_uid"); final MappedFieldType fieldType = context.getMapperService().smartNameFieldType("_uid");
if (fieldType == null) { if (fieldType == null) {
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used // mapper could be null if we are on a shard with no docs yet, so this won't actually be used
return new RandomScoreFunction(); return new RandomScoreFunction();

View File

@ -89,7 +89,7 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
@Override @Override
protected ScoreFunction doToFunction(QueryShardContext context) { protected ScoreFunction doToFunction(QueryShardContext context) {
try { try {
SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH); SearchScript searchScript = context.getScriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
return new ScriptScoreFunction(script, searchScript); return new ScriptScoreFunction(script, searchScript);
} catch (Exception e) { } catch (Exception e) {
throw new QueryShardException(context, "script_score: the script could not be loaded", e); throw new QueryShardException(context, "script_score: the script could not be loaded", e);

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query.support; package org.elasticsearch.index.query.support;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext; import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
@ -34,22 +33,15 @@ import java.io.IOException;
public class InnerHitsQueryParserHelper { public class InnerHitsQueryParserHelper {
private final SortParseElement sortParseElement; public static final InnerHitsQueryParserHelper INSTANCE = new InnerHitsQueryParserHelper();
private final FetchSourceParseElement sourceParseElement;
private final HighlighterParseElement highlighterParseElement;
private final ScriptFieldsParseElement scriptFieldsParseElement;
private final FieldDataFieldsParseElement fieldDataFieldsParseElement;
@Inject private static final SortParseElement sortParseElement = new SortParseElement();
public InnerHitsQueryParserHelper(SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, ScriptFieldsParseElement scriptFieldsParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement) { private static final FetchSourceParseElement sourceParseElement = new FetchSourceParseElement();
this.sortParseElement = sortParseElement; private static final HighlighterParseElement highlighterParseElement = new HighlighterParseElement();
this.sourceParseElement = sourceParseElement; private static final ScriptFieldsParseElement scriptFieldsParseElement = new ScriptFieldsParseElement();
this.highlighterParseElement = highlighterParseElement; private static final FieldDataFieldsParseElement fieldDataFieldsParseElement = new FieldDataFieldsParseElement();
this.scriptFieldsParseElement = scriptFieldsParseElement;
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
}
public InnerHitsSubSearchContext parse(XContentParser parser) throws IOException { public static InnerHitsSubSearchContext parse(XContentParser parser) throws IOException {
String fieldName = null; String fieldName = null;
XContentParser.Token token; XContentParser.Token token;
String innerHitName = null; String innerHitName = null;
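Since the parse elements are now static singletons, call sites no longer need an injected InnerHitsQueryParserHelper instance. A minimal sketch of the resulting usage follows; it is the same static call that QueryShardContext#getInnerHitsContext delegates to in this diff, and the wrapper class name is illustrative.
import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;

class InnerHitsParseSketch {
    // No constructor injection required anymore; the helper is purely static.
    static InnerHitsSubSearchContext parseInnerHits(XContentParser parser) throws IOException {
        return InnerHitsQueryParserHelper.parse(parser);
    }
}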

View File

@ -61,8 +61,8 @@ public class NestedInnerQueryParseSupport {
private ObjectMapper parentObjectMapper; private ObjectMapper parentObjectMapper;
public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) { public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) {
parseContext = searchContext.queryParserService().getShardContext().parseContext(); shardContext = searchContext.indexShard().getQueryShardContext();
shardContext = searchContext.queryParserService().getShardContext(); parseContext = shardContext.parseContext();
shardContext.reset(parser); shardContext.reset(parser);
} }

View File

@ -214,9 +214,9 @@ public class MatchQuery {
if (fieldType != null) { if (fieldType != null) {
return context.getSearchAnalyzer(fieldType); return context.getSearchAnalyzer(fieldType);
} }
return context.mapperService().searchAnalyzer(); return context.getMapperService().searchAnalyzer();
} else { } else {
Analyzer analyzer = context.mapperService().analysisService().analyzer(this.analyzer); Analyzer analyzer = context.getMapperService().analysisService().analyzer(this.analyzer);
if (analyzer == null) { if (analyzer == null) {
throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]"); throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
} }

View File

@ -27,6 +27,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.ThreadInterruptedException; import org.apache.lucene.util.ThreadInterruptedException;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
@ -81,6 +82,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.percolator.PercolateStats;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.SearchStats;
@ -152,6 +154,7 @@ public class IndexShard extends AbstractIndexShardComponent {
private final IndicesQueryCache indicesQueryCache; private final IndicesQueryCache indicesQueryCache;
private final IndexEventListener indexEventListener; private final IndexEventListener indexEventListener;
private final IndexSettings idxSettings; private final IndexSettings idxSettings;
private final IndexServicesProvider provider;
private TimeValue refreshInterval; private TimeValue refreshInterval;
@ -252,13 +255,14 @@ public class IndexShard extends AbstractIndexShardComponent {
this.flushThresholdSize = this.indexSettings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); this.flushThresholdSize = this.indexSettings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB));
this.disableFlush = this.indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.disableFlush = this.indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false);
this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId);
this.provider = provider;
this.searcherWrapper = indexSearcherWrapper; this.searcherWrapper = indexSearcherWrapper;
this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, provider.getQueryParserService(), indexingService, mapperService, indexFieldDataService); this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, indexingService, mapperService, newQueryShardContext(), indexFieldDataService);
if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) {
percolatorQueriesRegistry.enableRealTimePercolator(); percolatorQueriesRegistry.enableRealTimePercolator();
} }
// We start up inactive // We start up inactive
active.set(false); active.set(false);
} }
@ -762,7 +766,7 @@ public class IndexShard extends AbstractIndexShardComponent {
engine.flushAndClose(); engine.flushAndClose();
} }
} finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times
IOUtils.close(engine, percolatorQueriesRegistry); IOUtils.close(engine, percolatorQueriesRegistry, queryShardContextCache);
} }
} }
} }
@ -1598,4 +1602,23 @@ public class IndexShard extends AbstractIndexShardComponent {
} }
} }
private CloseableThreadLocal<QueryShardContext> queryShardContextCache = new CloseableThreadLocal<QueryShardContext>() {
// TODO We should get rid of this thread-local, but I think that should be a separate change
@Override
protected QueryShardContext initialValue() {
return newQueryShardContext();
}
};
private QueryShardContext newQueryShardContext() {
return new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
}
/**
* Returns a threadlocal QueryShardContext for this shard.
*/
public QueryShardContext getQueryShardContext() {
return queryShardContextCache.get();
}
} }
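A short sketch of how callers are expected to use the new accessor: the shard hands out its thread-local context, which can parse raw query source directly (parse(BytesReference) is defined on QueryShardContext earlier in this diff). The helper class name below is illustrative.
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;

class ShardQueryParsingSketch {
    // Resolves the shard-level, thread-local QueryShardContext and parses query source with it.
    static ParsedQuery parseOnShard(IndexShard shard, BytesReference querySource) {
        QueryShardContext context = shard.getQueryShardContext();
        return context.parse(querySource);
    }
}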

View File

@ -28,12 +28,16 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.*; import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsExecutors;
@ -50,7 +54,7 @@ import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.indexing.IndexingStats;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.SearchStats;
@ -60,6 +64,7 @@ import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.PluginsService;
@ -72,6 +77,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Stream; import java.util.stream.Stream;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
@ -93,6 +99,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
private final IndicesWarmer indicesWarmer; private final IndicesWarmer indicesWarmer;
private final IndicesQueryCache indicesQueryCache; private final IndicesQueryCache indicesQueryCache;
private final AnalysisRegistry analysisRegistry; private final AnalysisRegistry analysisRegistry;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final ClusterService clusterService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap(); private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap();
public AnalysisRegistry getAnalysis() { public AnalysisRegistry getAnalysis() {
@ -127,7 +137,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
} }
@Inject @Inject
public IndicesService(Settings settings, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer, AnalysisRegistry analysisRegistry) { public IndicesService(Settings settings, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService) {
super(settings); super(settings);
this.injector = injector; this.injector = injector;
this.pluginsService = pluginsService; this.pluginsService = pluginsService;
@ -137,6 +147,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
this.indexStoreConfig = new IndexStoreConfig(settings); this.indexStoreConfig = new IndexStoreConfig(settings);
this.analysisRegistry = analysisRegistry; this.analysisRegistry = analysisRegistry;
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
nodeSettingsService.addListener(indexStoreConfig); nodeSettingsService.addListener(indexStoreConfig);
} }
@ -282,6 +295,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
} }
/** /**
* Creates a new {@link IndexService} for the given metadata. * Creates a new {@link IndexService} for the given metadata.
* @param indexMetaData the index metadata to create the index for * @param indexMetaData the index metadata to create the index for
@ -292,8 +306,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
if (!lifecycle.started()) { if (!lifecycle.started()) {
throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed"); throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed");
} }
final String indexName = indexMetaData.getIndex();
final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state());
final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST); final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST, indexNameMatcher);
Index index = new Index(indexMetaData.getIndex()); Index index = new Index(indexMetaData.getIndex());
if (indices.containsKey(index.name())) { if (indices.containsKey(index.name())) {
throw new IndexAlreadyExistsException(index); throw new IndexAlreadyExistsException(index);
@ -384,9 +399,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
logger.debug("[{}] closing mapper service (reason [{}])", index, reason); logger.debug("[{}] closing mapper service (reason [{}])", index, reason);
indexInjector.getInstance(MapperService.class).close(); indexInjector.getInstance(MapperService.class).close();
logger.debug("[{}] closing index query parser service (reason [{}])", index, reason);
indexInjector.getInstance(IndexQueryParserService.class).close();
logger.debug("[{}] closed... (reason [{}])", index, reason); logger.debug("[{}] closed... (reason [{}])", index, reason);
listener.afterIndexClosed(indexService.index(), indexService.getIndexSettings().getSettings()); listener.afterIndexClosed(indexService.index(), indexService.getIndexSettings().getSettings());
if (delete) { if (delete) {
@ -786,4 +798,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
return deleteList.size(); return deleteList.size();
} }
} }
public QueryParseContext newQueryParserContext() {
return new QueryParseContext(indicesQueriesRegistry);
}
} }
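
The hunks above replace the direct ClusterService dependency with an index-name matcher: the resolver and cluster state are captured once in a Predicate<String> and handed to IndexSettings, so query parsing can ask "does this expression refer to my index?" without reaching back to node-level services. A minimal, self-contained sketch of that idea follows; SimpleResolver and its matching rules are illustrative stand-ins, not the real IndexNameExpressionResolver.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;

public class IndexNameMatcherSketch {

    // Stand-in for the node-level resolver; it only knows aliases and a trailing-'*' wildcard.
    static final class SimpleResolver {
        private final Set<String> aliases;

        SimpleResolver(Set<String> aliases) {
            this.aliases = aliases;
        }

        boolean matchesIndex(String indexName, String expression) {
            if (expression.endsWith("*")) {
                return indexName.startsWith(expression.substring(0, expression.length() - 1));
            }
            return indexName.equals(expression) || aliases.contains(expression);
        }
    }

    public static void main(String[] args) {
        String indexName = "logs-2015-11-02";
        SimpleResolver resolver = new SimpleResolver(new HashSet<>(Arrays.asList("logs")));

        // This predicate is what would be handed to IndexSettings in place of a ClusterService dependency.
        Predicate<String> indexNameMatcher = expression -> resolver.matchesIndex(indexName, expression);

        System.out.println(indexNameMatcher.test("logs"));          // true, via alias
        System.out.println(indexNameMatcher.test("logs-*"));        // true, via wildcard
        System.out.println(indexNameMatcher.test("metrics-2015"));  // false
    }
}
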

View File

@ -51,7 +51,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
@ -423,11 +422,6 @@ public class PercolateContext extends SearchContext {
return indexService.analysisService(); return indexService.analysisService();
} }
@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
@Override @Override
public SimilarityService similarityService() { public SimilarityService similarityService() {
return indexService.similarityService(); return indexService.similarityService();

View File

@ -74,6 +74,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.percolator.QueryCollector.Count; import org.elasticsearch.percolator.QueryCollector.Count;
@ -190,7 +191,7 @@ public class PercolatorService extends AbstractComponent {
indexShard.shardId().index().name(), indexShard.shardId().index().name(),
request.indices() request.indices()
); );
Query aliasFilter = percolateIndexService.aliasFilter(filteringAliases); Query aliasFilter = percolateIndexService.aliasFilter(indexShard.getQueryShardContext(), filteringAliases);
SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id()); SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
final PercolateContext context = new PercolateContext( final PercolateContext context = new PercolateContext(
@ -198,7 +199,7 @@ public class PercolatorService extends AbstractComponent {
); );
SearchContext.setCurrent(context); SearchContext.setCurrent(context);
try { try {
ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context, request.shardId().getIndex()); ParsedDocument parsedDocument = parseRequest(indexShard, request, context, request.shardId().getIndex());
if (context.percolateQueries().isEmpty()) { if (context.percolateQueries().isEmpty()) {
return new PercolateShardResponse(context, request.shardId()); return new PercolateShardResponse(context, request.shardId());
} }
@ -258,7 +259,7 @@ public class PercolatorService extends AbstractComponent {
} }
} }
private ParsedDocument parseRequest(IndexService documentIndexService, PercolateShardRequest request, PercolateContext context, String index) { private ParsedDocument parseRequest(IndexShard shard, PercolateShardRequest request, PercolateContext context, String index) {
BytesReference source = request.source(); BytesReference source = request.source();
if (source == null || source.length() == 0) { if (source == null || source.length() == 0) {
return null; return null;
@ -276,6 +277,7 @@ public class PercolatorService extends AbstractComponent {
// not the in memory percolate doc // not the in memory percolate doc
String[] previousTypes = context.types(); String[] previousTypes = context.types();
context.types(new String[]{TYPE_NAME}); context.types(new String[]{TYPE_NAME});
QueryShardContext queryShardContext = shard.getQueryShardContext();
try { try {
parser = XContentFactory.xContent(source).createParser(source); parser = XContentFactory.xContent(source).createParser(source);
String currentFieldName = null; String currentFieldName = null;
@ -290,7 +292,7 @@ public class PercolatorService extends AbstractComponent {
throw new ElasticsearchParseException("Either specify doc or get, not both"); throw new ElasticsearchParseException("Either specify doc or get, not both");
} }
MapperService mapperService = documentIndexService.mapperService(); MapperService mapperService = shard.mapperService();
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType()); DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true)); doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true));
if (docMapper.getMapping() != null) { if (docMapper.getMapping() != null) {
@ -312,12 +314,12 @@ public class PercolatorService extends AbstractComponent {
if (context.percolateQuery() != null) { if (context.percolateQuery() != null) {
throw new ElasticsearchParseException("Either specify query or filter, not both"); throw new ElasticsearchParseException("Either specify query or filter, not both");
} }
context.percolateQuery(documentIndexService.queryParserService().parse(parser).query()); context.percolateQuery(queryShardContext.parse(parser).query());
} else if ("filter".equals(currentFieldName)) { } else if ("filter".equals(currentFieldName)) {
if (context.percolateQuery() != null) { if (context.percolateQuery() != null) {
throw new ElasticsearchParseException("Either specify query or filter, not both"); throw new ElasticsearchParseException("Either specify query or filter, not both");
} }
Query filter = documentIndexService.queryParserService().parseInnerFilter(parser).query(); Query filter = queryShardContext.parseInnerFilter(parser).query();
context.percolateQuery(new ConstantScoreQuery(filter)); context.percolateQuery(new ConstantScoreQuery(filter));
} else if ("sort".equals(currentFieldName)) { } else if ("sort".equals(currentFieldName)) {
parseSort(parser, context); parseSort(parser, context);
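
In the percolator changes above, both the query and the filter branches now parse through the shard's QueryShardContext, and a parsed filter is wrapped in ConstantScoreQuery so it only decides matching and contributes a constant score. A small Lucene-only sketch of that wrapping, with a placeholder TermQuery standing in for the percolator's parsed filter:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class FilterAsQuerySketch {
    public static void main(String[] args) {
        // Placeholder for the query produced by queryShardContext.parseInnerFilter(parser).
        Query filter = new TermQuery(new Term("field", "value"));

        // A filter only decides matching; wrapping it in ConstantScoreQuery gives every
        // matching document the same score, which is what the percolate "filter" branch does.
        Query percolateQuery = new ConstantScoreQuery(filter);
        System.out.println(percolateQuery);
    }
}
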

View File

@ -61,6 +61,7 @@ import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.search.stats.ShardSearchStats; import org.elasticsearch.index.search.stats.ShardSearchStats;
import org.elasticsearch.index.search.stats.StatsGroupsParseElement; import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexEventListener;
@ -559,7 +560,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context); ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context);
BytesReference run = (BytesReference) executable.run(); BytesReference run = (BytesReference) executable.run();
try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) { try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry()); QueryParseContext queryParseContext = indicesService.newQueryParserContext();
queryParseContext.reset(parser); queryParseContext.reset(parser);
queryParseContext.parseFieldMatcher(parseFieldMatcher); queryParseContext.parseFieldMatcher(parseFieldMatcher);
parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext)); parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext));
@ -659,7 +660,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
if (source == null) { if (source == null) {
return; return;
} }
final IndexShard indexShard = context.indexShard();
QueryShardContext queryShardContext = indexShard.getQueryShardContext();
context.from(source.from()); context.from(source.from());
context.size(source.size()); context.size(source.size());
ObjectFloatHashMap<String> indexBoostMap = source.indexBoost(); ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
@ -670,10 +672,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
} }
} }
if (source.query() != null) { if (source.query() != null) {
context.parsedQuery(context.queryParserService().toQuery(source.query())); context.parsedQuery(queryShardContext.toQuery(source.query()));
} }
if (source.postFilter() != null) { if (source.postFilter() != null) {
context.parsedPostFilter(context.queryParserService().toQuery(source.postFilter())); context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
} }
if (source.sorts() != null) { if (source.sorts() != null) {
XContentParser completeSortParser = null; XContentParser completeSortParser = null;
@ -1178,8 +1180,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
try { try {
long now = System.nanoTime(); long now = System.nanoTime();
final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name()); final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name());
QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry()); QueryParseContext queryParseContext = indicesService.newQueryParserContext();
queryParseContext.parseFieldMatcher(indexService.queryParserService().parseFieldMatcher()); queryParseContext.parseFieldMatcher(indexService.getIndexSettings().getParseFieldMatcher());
ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexShard.getIndexSettings() ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexShard.getIndexSettings()
.getNumberOfShards(), .getNumberOfShards(),
SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache()); SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache());
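
The SearchService changes separate two concerns: a node-level QueryParseContext (built from the IndicesQueriesRegistry via indicesService.newQueryParserContext()) is enough to parse request bodies into builders, while turning those builders into executable queries goes through the shard's QueryShardContext. A rough sketch of that two-phase split, with stand-in types rather than the real Elasticsearch classes:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class TwoPhaseParsingSketch {

    interface QueryBuilder { String toQueryString(IndexState shardState); }

    // Stand-in for per-index state (mappings, settings) that only a shard can provide.
    static final class IndexState {
        final String defaultField;
        IndexState(String defaultField) { this.defaultField = defaultField; }
    }

    // Stand-in for the node-level IndicesQueriesRegistry: query name -> builder factory.
    static final class Registry {
        final Map<String, Function<String, QueryBuilder>> parsers = new HashMap<>();
    }

    public static void main(String[] args) {
        Registry registry = new Registry();
        registry.parsers.put("match", text ->
                shardState -> shardState.defaultField + ":" + text);

        // Phase 1 (node level): resolve the parser by name, no shard required.
        QueryBuilder builder = registry.parsers.get("match").apply("elasticsearch");

        // Phase 2 (shard level): only now is index-specific state needed.
        IndexState shardState = new IndexState("_all");
        System.out.println(builder.toQueryString(shardState)); // _all:elasticsearch
    }
}
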

View File

@ -40,7 +40,7 @@ public class FilterParser implements Aggregator.Parser {
@Override @Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser); ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
return new FilterAggregator.Factory(aggregationName, filter == null ? new MatchAllDocsQuery() : filter.query()); return new FilterAggregator.Factory(aggregationName, filter == null ? new MatchAllDocsQuery() : filter.query());
} }
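
The aggregation parsers above fetch the shard's QueryShardContext through the search context and fall back to a match-all query when no filter is supplied. A tiny Lucene sketch of that fallback, with the parsed filter replaced by a null placeholder:

import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;

public class NullFilterFallbackSketch {
    // An absent filter degrades to match-all rather than to "match nothing".
    static Query filterOrMatchAll(Query parsedFilter) {
        return parsedFilter == null ? new MatchAllDocsQuery() : parsedFilter;
    }

    public static void main(String[] args) {
        System.out.println(filterOrMatchAll(null)); // *:*
    }
}
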

View File

@ -82,7 +82,7 @@ public class FiltersParser implements Aggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
key = parser.currentName(); key = parser.currentName();
} else { } else {
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser); ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
filters.add(new FiltersAggregator.KeyedFilter(key, filter == null ? Queries.newMatchAllQuery() : filter.query())); filters.add(new FiltersAggregator.KeyedFilter(key, filter == null ? Queries.newMatchAllQuery() : filter.query()));
} }
} }
@ -95,7 +95,7 @@ public class FiltersParser implements Aggregator.Parser {
keyed = false; keyed = false;
int idx = 0; int idx = 0;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser); ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
filters.add(new FiltersAggregator.KeyedFilter(String.valueOf(idx), filter == null ? Queries.newMatchAllQuery() filters.add(new FiltersAggregator.KeyedFilter(String.valueOf(idx), filter == null ? Queries.newMatchAllQuery()
: filter.query())); : filter.query()));
idx++; idx++;

View File

@ -66,7 +66,7 @@ public class SignificantTermsParametersParser extends AbstractTermsParametersPar
if (significanceHeuristicParser != null) { if (significanceHeuristicParser != null) {
significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher(), context); significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher(), context);
} else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) { } else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
filter = context.queryParserService().parseInnerFilter(parser).query(); filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser).query();
} else { } else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation()); + currentFieldName + "].", parser.getTokenLocation());

View File

@ -59,7 +59,7 @@ public class InnerHitsParseElement implements SearchParseElement {
@Override @Override
public void parse(XContentParser parser, SearchContext searchContext) throws Exception { public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
QueryShardContext context = searchContext.queryParserService().getShardContext(); QueryShardContext context = searchContext.indexShard().getQueryShardContext();
context.reset(parser); context.reset(parser);
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, context, searchContext); Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, context, searchContext);
if (innerHitsMap != null) { if (innerHitsMap != null) {
@ -149,7 +149,7 @@ public class InnerHitsParseElement implements SearchParseElement {
if (documentMapper == null) { if (documentMapper == null) {
throw new IllegalArgumentException("type [" + type + "] doesn't exist"); throw new IllegalArgumentException("type [" + type + "] doesn't exist");
} }
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.mapperService(), documentMapper); return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.getMapperService(), documentMapper);
} }
private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception { private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception {
@ -178,7 +178,7 @@ public class InnerHitsParseElement implements SearchParseElement {
fieldName = parser.currentName(); fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(fieldName)) { if ("query".equals(fieldName)) {
Query q = searchContext.queryParserService().parseInnerQuery(context); Query q = context.parseInnerQuery();
query = new ParsedQuery(q, context.copyNamedQueries()); query = new ParsedQuery(q, context.copyNamedQueries());
} else if ("inner_hits".equals(fieldName)) { } else if ("inner_hits".equals(fieldName)) {
childInnerHits = parseInnerHits(parser, context, searchContext); childInnerHits = parseInnerHits(parser, context, searchContext);

View File

@ -22,7 +22,7 @@ package org.elasticsearch.search.highlight;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
@ -66,13 +66,13 @@ public class HighlighterParseElement implements SearchParseElement {
@Override @Override
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
try { try {
context.highlight(parse(parser, context.queryParserService())); context.highlight(parse(parser, context.indexShard().getQueryShardContext()));
} catch (IllegalArgumentException ex) { } catch (IllegalArgumentException ex) {
throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation()); throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation());
} }
} }
public SearchContextHighlight parse(XContentParser parser, IndexQueryParserService queryParserService) throws IOException { public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
XContentParser.Token token; XContentParser.Token token;
String topLevelFieldName = null; String topLevelFieldName = null;
final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>(); final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>();
@ -111,7 +111,7 @@ public class HighlighterParseElement implements SearchParseElement {
} }
highlightFieldName = parser.currentName(); highlightFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryParserService))); fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
} }
} }
} else { } else {
@ -167,11 +167,11 @@ public class HighlighterParseElement implements SearchParseElement {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
highlightFieldName = parser.currentName(); highlightFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryParserService))); fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
} }
} }
} else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) { } else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) {
globalOptionsBuilder.highlightQuery(queryParserService.parse(parser).query()); globalOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
} }
} }
} }
@ -189,7 +189,7 @@ public class HighlighterParseElement implements SearchParseElement {
return new SearchContextHighlight(fields); return new SearchContextHighlight(fields);
} }
protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, IndexQueryParserService queryParserService) throws IOException { protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
XContentParser.Token token; XContentParser.Token token;
final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder(); final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
@ -252,7 +252,7 @@ public class HighlighterParseElement implements SearchParseElement {
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) { if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) {
fieldOptionsBuilder.highlightQuery(queryParserService.parse(parser).query()); fieldOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
} else if ("options".equals(fieldName)) { } else if ("options".equals(fieldName)) {
fieldOptionsBuilder.options(parser.map()); fieldOptionsBuilder.options(parser.map());
} }
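
The highlighter element now receives a QueryShardContext instead of the whole IndexQueryParserService, i.e. only the capability it actually uses (parsing a highlight_query). A generic sketch of that narrowing, with a plain Function standing in for the shard-aware parser; all names here are illustrative:

import java.util.function.Function;

public class NarrowDependencySketch {

    static final class Options {
        String highlightQuery;
        @Override public String toString() { return "highlight_query=" + highlightQuery; }
    }

    // The parse method only sees Function<String, String>: the shard-aware machinery
    // stays on the caller's side (where the real code calls queryShardContext.parse).
    static Options parseHighlight(String rawHighlightQuery, Function<String, String> queryParser) {
        Options options = new Options();
        options.highlightQuery = queryParser.apply(rawHighlightQuery);
        return options;
    }

    public static void main(String[] args) {
        Function<String, String> fakeShardParser = raw -> "parsed(" + raw + ")";
        System.out.println(parseHighlight("{\"match_all\":{}}", fakeShardParser));
    }
}
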

View File

@ -35,14 +35,12 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.*;
import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
@ -193,7 +191,7 @@ public class DefaultSearchContext extends SearchContext {
} }
// initialize the filtering alias based on the provided filters // initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasFilter(request.filteringAliases()); aliasFilter = indexService.aliasFilter(indexShard.getQueryShardContext(), request.filteringAliases());
if (query() == null) { if (query() == null) {
parsedQuery(ParsedQuery.parsedMatchAllQuery()); parsedQuery(ParsedQuery.parsedMatchAllQuery());
@ -430,11 +428,6 @@ public class DefaultSearchContext extends SearchContext {
return indexService.analysisService(); return indexService.analysisService();
} }
@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
@Override @Override
public SimilarityService similarityService() { public SimilarityService similarityService() {
return indexService.similarityService(); return indexService.similarityService();

View File

@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
@ -257,11 +256,6 @@ public abstract class FilteredSearchContext extends SearchContext {
return in.analysisService(); return in.analysisService();
} }
@Override
public IndexQueryParserService queryParserService() {
return in.queryParserService();
}
@Override @Override
public SimilarityService similarityService() { public SimilarityService similarityService() {
return in.similarityService(); return in.similarityService();

View File

@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
@ -206,8 +205,6 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
public abstract AnalysisService analysisService(); public abstract AnalysisService analysisService();
public abstract IndexQueryParserService queryParserService();
public abstract SimilarityService similarityService(); public abstract SimilarityService similarityService();
public abstract ScriptService scriptService(); public abstract ScriptService scriptService();

View File

@ -33,7 +33,7 @@ public class FilterBinaryParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] filterSource = parser.binaryValue(); byte[] filterSource = parser.binaryValue();
try (XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource)) { try (XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedQuery filter = context.queryParserService().parseInnerFilter(fSourceParser); ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(fSourceParser);
if (filter != null) { if (filter != null) {
context.parsedPostFilter(filter); context.parsedPostFilter(filter);
} }

View File

@ -30,7 +30,7 @@ public class PostFilterParseElement implements SearchParseElement {
@Override @Override
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
ParsedQuery postFilter = context.queryParserService().parseInnerFilter(parser); ParsedQuery postFilter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
if (postFilter != null) { if (postFilter != null) {
context.parsedPostFilter(postFilter); context.parsedPostFilter(postFilter);
} }

View File

@ -33,7 +33,7 @@ public class QueryBinaryParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] querySource = parser.binaryValue(); byte[] querySource = parser.binaryValue();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
context.parsedQuery(context.queryParserService().parse(qSourceParser)); context.parsedQuery(context.indexShard().getQueryShardContext().parse(qSourceParser));
} }
} }
} }

View File

@ -30,6 +30,6 @@ public class QueryParseElement implements SearchParseElement {
@Override @Override
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
context.parsedQuery(context.queryParserService().parse(parser)); context.parsedQuery(context.indexShard().getQueryShardContext().parse(parser));
} }
} }

View File

@ -183,7 +183,7 @@ public final class QueryRescorer implements Rescorer {
private static final ObjectParser<QueryRescoreContext, SearchContext> RESCORE_PARSER = new ObjectParser<>("query", null); private static final ObjectParser<QueryRescoreContext, SearchContext> RESCORE_PARSER = new ObjectParser<>("query", null);
static { static {
RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.queryParserService().parse(p), new ParseField("rescore_query")); RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.indexShard().getQueryShardContext().parse(p), new ParseField("rescore_query"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight")); RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight")); RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight"));
RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode")); RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode"));

View File

@ -72,7 +72,7 @@ public class GeoDistanceSortParser implements SortParser {
MultiValueMode sortMode = null; MultiValueMode sortMode = null;
NestedInnerQueryParseSupport nestedHelper = null; NestedInnerQueryParseSupport nestedHelper = null;
final boolean indexCreatedBeforeV2_0 = context.queryParserService().getIndexCreatedVersion().before(Version.V_2_0_0); final boolean indexCreatedBeforeV2_0 = context.indexShard().getIndexSettings().getIndexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false; boolean coerce = false;
boolean ignoreMalformed = false; boolean ignoreMalformed = false;

View File

@ -21,12 +21,10 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import java.io.IOException; import java.io.IOException;
public interface SuggestContextParser { public interface SuggestContextParser {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException; SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException;
} }
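
With IndexQueryParserService gone from the signature, the suggest parsers obtain their ParseFieldMatcher from the index settings that the MapperService already carries. A small sketch of that call chain; the classes below are simplified stand-ins for the Elasticsearch types of the same name:

public class ParseFieldMatcherSourceSketch {

    static final class ParseFieldMatcher {
        private final boolean strict;
        ParseFieldMatcher(boolean strict) { this.strict = strict; }

        boolean match(String fieldName, String canonicalName) {
            // A strict matcher only accepts the canonical spelling; a lenient one would also
            // accept deprecated variants (simplified here to a case-insensitive comparison).
            return strict ? fieldName.equals(canonicalName) : fieldName.equalsIgnoreCase(canonicalName);
        }
    }

    static final class IndexSettings {
        ParseFieldMatcher getParseFieldMatcher() { return new ParseFieldMatcher(true); }
    }

    static final class MapperService {
        private final IndexSettings indexSettings = new IndexSettings();
        IndexSettings getIndexSettings() { return indexSettings; }
    }

    public static void main(String[] args) {
        MapperService mapperService = new MapperService();
        ParseFieldMatcher matcher = mapperService.getIndexSettings().getParseFieldMatcher();
        System.out.println(matcher.match("fuzzy", "fuzzy")); // true
    }
}
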

View File

@ -23,7 +23,6 @@ import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
@ -45,13 +44,13 @@ public final class SuggestParseElement implements SearchParseElement {
@Override @Override
public void parse(XContentParser parser, SearchContext context) throws Exception { public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(), context.shardTarget().index(), context.shardTarget().shardId(), context); SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.shardTarget().index(), context.shardTarget().shardId(), context);
context.suggest(suggestionSearchContext); context.suggest(suggestionSearchContext);
} }
public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, String index, int shardId, HasContextAndHeaders headersContext) throws IOException { public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, String index, int shardId, HasContextAndHeaders headersContext) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
BytesRef globalText = null; BytesRef globalText = null;
@ -90,7 +89,7 @@ public final class SuggestParseElement implements SearchParseElement {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
} }
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser(); final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
suggestionContext = contextParser.parse(parser, mapperService, queryParserService, headersContext); suggestionContext = contextParser.parse(parser, mapperService, headersContext);
} }
} }
if (suggestionContext != null) { if (suggestionContext != null) {

View File

@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
@ -43,13 +44,13 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
} }
@Inject @Inject
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService) { public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
this(addBuildIns(suggesters, scriptService)); this(addBuildIns(suggesters, scriptService, indexServices));
} }
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService) { private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
final Map<String, Suggester> map = new HashMap<>(); final Map<String, Suggester> map = new HashMap<>();
map.put("phrase", new PhraseSuggester(scriptService)); map.put("phrase", new PhraseSuggester(scriptService, indexServices));
map.put("term", new TermSuggester()); map.put("term", new TermSuggester());
map.put("completion", new CompletionSuggester()); map.put("completion", new CompletionSuggester());
map.putAll(suggesters); map.putAll(suggesters);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest.completion; package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.HasContextAndHeaders; import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -26,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.context.ContextMapping.ContextQuery; import org.elasticsearch.search.suggest.context.ContextMapping.ContextQuery;
@ -48,9 +48,9 @@ public class CompletionSuggestParser implements SuggestContextParser {
} }
@Override @Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException { public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token; XContentParser.Token token;
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
String fieldName = null; String fieldName = null;
CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester); CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester);
@ -60,7 +60,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName(); fieldName = parser.currentName();
} else if (token.isValue()) { } else if (token.isValue()) {
if (!parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) { if (!parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if (token == XContentParser.Token.VALUE_BOOLEAN && "fuzzy".equals(fieldName)) { if (token == XContentParser.Token.VALUE_BOOLEAN && "fuzzy".equals(fieldName)) {
suggestion.setFuzzy(parser.booleanValue()); suggestion.setFuzzy(parser.booleanValue());
} }
@ -73,7 +73,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
fuzzyConfigName = parser.currentName(); fuzzyConfigName = parser.currentName();
} else if (token.isValue()) { } else if (token.isValue()) {
if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, Fuzziness.FIELD)) { if (parseFieldMatcher.match(fuzzyConfigName, Fuzziness.FIELD)) {
suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance()); suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance());
} else if ("transpositions".equals(fuzzyConfigName)) { } else if ("transpositions".equals(fuzzyConfigName)) {
suggestion.setFuzzyTranspositions(parser.booleanValue()); suggestion.setFuzzyTranspositions(parser.booleanValue());

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.Template; import org.elasticsearch.script.Template;
@ -50,9 +49,9 @@ public final class PhraseSuggestParser implements SuggestContextParser {
@Override @Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException { public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException {
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester); PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
suggestion.setQueryParserService(queryParserService); ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
XContentParser.Token token; XContentParser.Token token;
String fieldName = null; String fieldName = null;
boolean gramSizeSet = false; boolean gramSizeSet = false;
@ -60,7 +59,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName(); fieldName = parser.currentName();
} else if (token.isValue()) { } else if (token.isValue()) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) { if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) { if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
suggestion.setRealWordErrorLikelihood(parser.floatValue()); suggestion.setRealWordErrorLikelihood(parser.floatValue());
if (suggestion.realworldErrorLikelyhood() <= 0.0) { if (suggestion.realworldErrorLikelyhood() <= 0.0) {
@ -106,7 +105,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
fieldName = parser.currentName(); fieldName = parser.currentName();
} }
if (token.isValue()) { if (token.isValue()) {
parseCandidateGenerator(parser, mapperService, fieldName, generator, queryParserService.parseFieldMatcher()); parseCandidateGenerator(parser, mapperService, fieldName, generator, parseFieldMatcher);
} }
} }
verifyGenerator(generator); verifyGenerator(generator);
@ -141,7 +140,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (suggestion.getCollateQueryScript() != null) { if (suggestion.getCollateQueryScript() != null) {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]"); throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
} }
Template template = Template.parse(parser, queryParserService.parseFieldMatcher()); Template template = Template.parse(parser, parseFieldMatcher);
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
headersContext); headersContext);
suggestion.setCollateQueryScript(compiledScript); suggestion.setCollateQueryScript(compiledScript);

View File

@ -29,12 +29,13 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector;
import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
@ -55,10 +56,11 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
private final BytesRef SEPARATOR = new BytesRef(" "); private final BytesRef SEPARATOR = new BytesRef(" ");
private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion"; private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
private final ScriptService scriptService; private final ScriptService scriptService;
private final IndicesService indicesService;
@Inject
public PhraseSuggester(ScriptService scriptService) { public PhraseSuggester(ScriptService scriptService, IndicesService indicesService) {
this.scriptService = scriptService; this.scriptService = scriptService;
this.indicesService = indicesService;
} }
/* /*
@ -117,7 +119,9 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
final ExecutableScript executable = scriptService.executable(collateScript, vars); final ExecutableScript executable = scriptService.executable(collateScript, vars);
final BytesReference querySource = (BytesReference) executable.run(); final BytesReference querySource = (BytesReference) executable.run();
final ParsedQuery parsedQuery = suggestion.getQueryParserService().parse(querySource); IndexService indexService = indicesService.indexService(suggestion.getIndex());
IndexShard shard = indexService.getShard(suggestion.getShard());
final ParsedQuery parsedQuery = shard.getQueryShardContext().parse(querySource);
collateMatch = Lucene.exists(searcher, parsedQuery.query()); collateMatch = Lucene.exists(searcher, parsedQuery.query());
} }
if (!collateMatch && !collatePrune) { if (!collateMatch && !collatePrune) {
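
Instead of carrying a parser service in the suggestion context, the collate step above resolves index -> shard -> QueryShardContext through the injected IndicesService when the collate query is actually executed. A self-contained sketch of that lookup chain, again with simplified stand-ins for the Elasticsearch classes:

import java.util.HashMap;
import java.util.Map;

public class CollateLookupSketch {

    static final class QueryShardContext {
        String parse(String querySource) { return "query(" + querySource + ")"; }
    }

    static final class IndexShard {
        private final QueryShardContext context = new QueryShardContext();
        QueryShardContext getQueryShardContext() { return context; }
    }

    static final class IndexService {
        private final Map<Integer, IndexShard> shards = new HashMap<>();
        IndexService() { shards.put(0, new IndexShard()); }
        IndexShard getShard(int id) { return shards.get(id); }
    }

    static final class IndicesService {
        private final Map<String, IndexService> indices = new HashMap<>();
        IndicesService() { indices.put("my_index", new IndexService()); }
        IndexService indexService(String name) { return indices.get(name); }
    }

    public static void main(String[] args) {
        IndicesService indicesService = new IndicesService();
        // suggestion.getIndex() / suggestion.getShard() would supply these in the real code.
        QueryShardContext shardContext =
                indicesService.indexService("my_index").getShard(0).getQueryShardContext();
        System.out.println(shardContext.parse("{\"match_all\":{}}"));
    }
}
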

View File

@ -25,7 +25,6 @@ import java.util.Map;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggester;
@ -33,7 +32,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContex
class PhraseSuggestionContext extends SuggestionContext { class PhraseSuggestionContext extends SuggestionContext {
private final BytesRef SEPARATOR = new BytesRef(" "); private final BytesRef SEPARATOR = new BytesRef(" ");
private IndexQueryParserService queryParserService;
private float maxErrors = 0.5f; private float maxErrors = 0.5f;
private BytesRef separator = SEPARATOR; private BytesRef separator = SEPARATOR;
private float realworldErrorLikelihood = 0.95f; private float realworldErrorLikelihood = 0.95f;
@ -112,14 +110,6 @@ class PhraseSuggestionContext extends SuggestionContext {
return scorer; return scorer;
} }
public void setQueryParserService(IndexQueryParserService queryParserService) {
this.queryParserService = queryParserService;
}
public IndexQueryParserService getQueryParserService() {
return queryParserService;
}
static class DirectCandidateGenerator extends DirectSpellcheckerSettings { static class DirectCandidateGenerator extends DirectSpellcheckerSettings {
private Analyzer preFilter; private Analyzer preFilter;
private Analyzer postFilter; private Analyzer postFilter;

View File

@ -22,7 +22,6 @@ import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils; import org.elasticsearch.search.suggest.SuggestUtils;
@ -40,7 +39,7 @@ public final class TermSuggestParser implements SuggestContextParser {
@Override @Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException { public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token; XContentParser.Token token;
String fieldName = null; String fieldName = null;
TermSuggestionContext suggestion = new TermSuggestionContext(suggester); TermSuggestionContext suggestion = new TermSuggestionContext(suggester);
@ -49,7 +48,7 @@ public final class TermSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) { if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName(); fieldName = parser.currentName();
} else if (token.isValue()) { } else if (token.isValue()) {
parseTokenValue(parser, mapperService, fieldName, suggestion, settings, queryParserService.parseFieldMatcher()); parseTokenValue(parser, mapperService, fieldName, suggestion, settings, mapperService.getIndexSettings().getParseFieldMatcher());
} else { } else {
throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]"); throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]");
} }

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.InvalidAliasNameException; import org.elasticsearch.indices.InvalidAliasNameException;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
@ -73,6 +74,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testFilteringAliases() throws Exception { public void testFilteringAliases() throws Exception {
IndexService indexService = newIndexService(); IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog"))); add(indexService, "dogs", filter(termQuery("animal", "dog")));
add(indexService, "all", null); add(indexService, "all", null);
@ -81,41 +83,44 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
assertThat(indexService.getMetaData().getAliases().containsKey("dogs"), equalTo(true)); assertThat(indexService.getMetaData().getAliases().containsKey("dogs"), equalTo(true));
assertThat(indexService.getMetaData().getAliases().containsKey("turtles"), equalTo(false)); assertThat(indexService.getMetaData().getAliases().containsKey("turtles"), equalTo(false));
assertThat(indexService.aliasFilter("cats").toString(), equalTo("animal:cat")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats").toString(), equalTo("animal:cat"));
assertThat(indexService.aliasFilter("cats", "dogs").toString(), equalTo("animal:cat animal:dog")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "dogs").toString(), equalTo("animal:cat animal:dog"));
// Non-filtering alias should turn off all filters because filters are ORed // Non-filtering alias should turn off all filters because filters are ORed
assertThat(indexService.aliasFilter("all"), nullValue()); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all"), nullValue());
assertThat(indexService.aliasFilter("cats", "all"), nullValue()); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "all"), nullValue());
assertThat(indexService.aliasFilter("all", "cats"), nullValue()); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all", "cats"), nullValue());
add(indexService, "cats", filter(termQuery("animal", "feline"))); add(indexService, "cats", filter(termQuery("animal", "feline")));
add(indexService, "dogs", filter(termQuery("animal", "canine"))); add(indexService, "dogs", filter(termQuery("animal", "canine")));
assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:canine animal:feline")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
} }
public void testAliasFilters() throws Exception { public void testAliasFilters() throws Exception {
IndexService indexService = newIndexService(); IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog"))); add(indexService, "dogs", filter(termQuery("animal", "dog")));
assertThat(indexService.aliasFilter(), nullValue()); assertThat(indexService.aliasFilter(shard.getQueryShardContext()), nullValue());
assertThat(indexService.aliasFilter("dogs").toString(), equalTo("animal:dog")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs").toString(), equalTo("animal:dog"));
assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:dog animal:cat")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:dog animal:cat"));
add(indexService, "cats", filter(termQuery("animal", "feline"))); add(indexService, "cats", filter(termQuery("animal", "feline")));
add(indexService, "dogs", filter(termQuery("animal", "canine"))); add(indexService, "dogs", filter(termQuery("animal", "canine")));
assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:canine animal:feline")); assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
} }
public void testRemovedAliasFilter() throws Exception { public void testRemovedAliasFilter() throws Exception {
IndexService indexService = newIndexService(); IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "cats", filter(termQuery("animal", "cat")));
remove(indexService, "cats"); remove(indexService, "cats");
try { try {
indexService.aliasFilter("cats"); indexService.aliasFilter(shard.getQueryShardContext(), "cats");
fail("Expected InvalidAliasNameException"); fail("Expected InvalidAliasNameException");
} catch (InvalidAliasNameException e) { } catch (InvalidAliasNameException e) {
assertThat(e.getMessage(), containsString("Invalid alias name [cats]")); assertThat(e.getMessage(), containsString("Invalid alias name [cats]"));
@ -124,11 +129,13 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
public void testUnknownAliasFilter() throws Exception { public void testUnknownAliasFilter() throws Exception {
IndexService indexService = newIndexService(); IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog"))); add(indexService, "dogs", filter(termQuery("animal", "dog")));
try { try {
indexService.aliasFilter("unknown"); indexService.aliasFilter(shard.getQueryShardContext(), "unknown");
fail(); fail();
} catch (InvalidAliasNameException e) { } catch (InvalidAliasNameException e) {
// all is well // all is well
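As the test changes above show, alias filters are no longer resolved through a parser service; callers pass the shard-level QueryShardContext explicitly. A condensed sketch of the new call pattern, assuming the same test fixtures (newIndexService(), add(), filter(), termQuery()) used in this file:

    IndexService indexService = newIndexService();
    IndexShard shard = indexService.getShard(0);
    add(indexService, "cats", filter(termQuery("animal", "cat")));
    add(indexService, "dogs", filter(termQuery("animal", "dog")));
    // the QueryShardContext is now an explicit argument to aliasFilter(...)
    assertEquals("animal:cat animal:dog",
            indexService.aliasFilter(shard.getQueryShardContext(), "cats", "dogs").toString());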
View File
@ -66,6 +66,7 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache; import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper;
@ -75,6 +76,7 @@ import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.*; import org.elasticsearch.script.*;
import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
@ -122,10 +124,17 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private static final int NUMBER_OF_TESTQUERIES = 20; private static final int NUMBER_OF_TESTQUERIES = 20;
private static Injector injector; private static Injector injector;
private static IndexQueryParserService queryParserService; private static IndicesQueriesRegistry indicesQueriesRegistry;
private static QueryShardContext queryShardContext;
private static IndexFieldDataService indexFieldDataService;
protected static IndexQueryParserService queryParserService() {
return queryParserService; protected static QueryShardContext queryShardContext() {
return queryShardContext;
}
protected static IndexFieldDataService indexFieldDataService() {
return indexFieldDataService;
} }
private static Index index; private static Index index;
@ -233,9 +242,13 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
} }
} }
).createInjector(); ).createInjector();
queryParserService = injector.getInstance(IndexQueryParserService.class); SimilarityService similarityService = injector.getInstance(SimilarityService.class);
indexFieldDataService = injector.getInstance(IndexFieldDataService.class);
MapperService mapperService = queryParserService.mapperService; ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
queryShardContext = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
//create some random type with some default field, those types will stick around for all of the subclasses //create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)]; currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) { for (int i = 0; i < currentTypes.length; i++) {
@ -264,10 +277,12 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
terminate(injector.getInstance(ThreadPool.class)); terminate(injector.getInstance(ThreadPool.class));
injector = null; injector = null;
index = null; index = null;
queryParserService = null; queryShardContext = null;
currentTypes = null; currentTypes = null;
namedWriteableRegistry = null; namedWriteableRegistry = null;
randomTypes = null; randomTypes = null;
indicesQueriesRegistry = null;
indexFieldDataService = null;
} }
@Before @Before
@ -542,7 +557,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
} }
private QueryParser<?> queryParser(String queryId) { private QueryParser<?> queryParser(String queryId) {
return queryParserService.indicesQueriesRegistry().queryParsers().get(queryId); return indicesQueriesRegistry.queryParsers().get(queryId);
} }
//we use the streaming infra to create a copy of the query provided as argument //we use the streaming infra to create a copy of the query provided as argument
@ -562,7 +577,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* @return a new {@link QueryShardContext} based on the base test index and queryParserService * @return a new {@link QueryShardContext} based on the base test index and queryParserService
*/ */
protected static QueryShardContext createShardContext() { protected static QueryShardContext createShardContext() {
QueryShardContext queryCreationContext = new QueryShardContext(queryParserService); QueryShardContext queryCreationContext = queryShardContext.clone();
queryCreationContext.reset(); queryCreationContext.reset();
queryCreationContext.parseFieldMatcher(ParseFieldMatcher.STRICT); queryCreationContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
return queryCreationContext; return queryCreationContext;
@ -572,7 +587,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* @return a new {@link QueryParseContext} based on the base test index and queryParserService * @return a new {@link QueryParseContext} based on the base test index and queryParserService
*/ */
protected static QueryParseContext createParseContext() { protected static QueryParseContext createParseContext() {
QueryParseContext queryParseContext = new QueryParseContext(queryParserService.indicesQueriesRegistry()); QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
queryParseContext.reset(null); queryParseContext.reset(null);
queryParseContext.parseFieldMatcher(ParseFieldMatcher.STRICT); queryParseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
return queryParseContext; return queryParseContext;
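In short, the shared test fixture now builds one QueryShardContext directly from injected components and hands out copies per test, instead of resolving an IndexQueryParserService from the injector. A compressed sketch restating the wiring shown above (injector, idxSettings and proxy are the fixture fields visible in this diff):

    QueryShardContext base = new QueryShardContext(idxSettings, proxy,
            new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null)),
            injector.getInstance(IndexFieldDataService.class),
            injector.getInstance(MapperService.class),
            injector.getInstance(SimilarityService.class),
            injector.getInstance(ScriptService.class),
            injector.getInstance(IndicesQueriesRegistry.class));
    // per-test contexts are clones of the shared instance
    QueryShardContext perTest = base.clone();
    perTest.reset();
    perTest.parseFieldMatcher(ParseFieldMatcher.STRICT);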
View File
@ -54,7 +54,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
public void setUp() throws Exception { public void setUp() throws Exception {
super.setUp(); super.setUp();
MapperService mapperService = queryParserService().mapperService; MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=string", STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -75,8 +75,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
} }
protected void setSearchContext(String[] types) { protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService; final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = queryParserService().fieldDataService; final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() { TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context; private InnerHitsContext context;
View File
@ -49,7 +49,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
public void setUp() throws Exception { public void setUp() throws Exception {
super.setUp(); super.setUp();
MapperService mapperService = queryParserService().mapperService; MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=string", STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -70,8 +70,8 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
} }
protected void setSearchContext(String[] types) { protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService; final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = queryParserService().fieldDataService; final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() { TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context; private InnerHitsContext context;
View File
@ -43,7 +43,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
@Override @Override
public void setUp() throws Exception { public void setUp() throws Exception {
super.setUp(); super.setUp();
MapperService mapperService = queryParserService().mapperService; MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc", mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc",
STRING_FIELD_NAME, "type=string", STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -57,8 +57,8 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
@Override @Override
protected void setSearchContext(String[] types) { protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService; final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = queryParserService().fieldDataService; final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() { TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context; private InnerHitsContext context;
View File
@ -58,7 +58,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString()); query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
// Create timestamp option only then we have a date mapper, // Create timestamp option only then we have a date mapper,
// otherwise we could trigger exception. // otherwise we could trigger exception.
if (createShardContext().mapperService().smartNameFieldType(DATE_FIELD_NAME) != null) { if (createShardContext().getMapperService().smartNameFieldType(DATE_FIELD_NAME) != null) {
if (randomBoolean()) { if (randomBoolean()) {
query.timeZone(randomTimeZone()); query.timeZone(randomTimeZone());
} }
View File
@ -267,7 +267,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
// the remaining tests requires either a mapping that we register with types in base test setup // the remaining tests requires either a mapping that we register with types in base test setup
// no strict field resolution (version before V_1_4_0_Beta1) // no strict field resolution (version before V_1_4_0_Beta1)
if (getCurrentTypes().length > 0 || shardContext.indexQueryParserService().getIndexCreatedVersion().before(Version.V_1_4_0_Beta1)) { if (getCurrentTypes().length > 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) {
Query luceneQuery = queryBuilder.toQuery(shardContext); Query luceneQuery = queryBuilder.toQuery(shardContext);
assertThat(luceneQuery, instanceOf(BooleanQuery.class)); assertThat(luceneQuery, instanceOf(BooleanQuery.class));
TermQuery termQuery = (TermQuery) ((BooleanQuery) luceneQuery).clauses().get(0).getQuery(); TermQuery termQuery = (TermQuery) ((BooleanQuery) luceneQuery).clauses().get(0).getQuery();
View File
@ -44,13 +44,17 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache; import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -121,9 +125,13 @@ public class TemplateQueryParserTests extends ESTestCase {
} }
} }
).createInjector(); ).createInjector();
SimilarityService similarityService = injector.getInstance(SimilarityService.class);
IndexQueryParserService queryParserService = injector.getInstance(IndexQueryParserService.class); IndexFieldDataService indexFieldDataService = injector.getInstance(IndexFieldDataService.class);
context = new QueryShardContext(queryParserService); ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
context = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
} }
@Override @Override
View File
@ -56,7 +56,7 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase<WrapperQuery
@Override @Override
protected void doAssertLuceneQuery(WrapperQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { protected void doAssertLuceneQuery(WrapperQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) { try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService()); final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser); contextCopy.reset(qSourceParser);
QueryBuilder<?> innerQuery = contextCopy.parseContext().parseInnerQueryBuilder(); QueryBuilder<?> innerQuery = contextCopy.parseContext().parseInnerQueryBuilder();
Query expected = innerQuery.toQuery(context); Query expected = innerQuery.toQuery(context);
View File
@ -24,7 +24,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
@ -66,15 +66,14 @@ public class CustomQueryParserIT extends ESIntegTestCase {
assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryParserPlugin.DummyQueryBuilder())).get(), 1l); assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryParserPlugin.DummyQueryBuilder())).get(), 1l);
} }
private static IndexQueryParserService queryParser() { private static QueryShardContext queryShardContext() {
IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class); IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class);
return indicesService.indexServiceSafe("index").queryParserService(); return indicesService.indexServiceSafe("index").getQueryShardContext();
} }
//see #11120 //see #11120
public void testConstantScoreParsesFilter() throws Exception { public void testConstantScoreParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser(); Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
Query inner = ((ConstantScoreQuery) q).getQuery(); Query inner = ((ConstantScoreQuery) q).getQuery();
assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class)); assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class));
assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter); assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter);
@ -82,13 +81,12 @@ public class CustomQueryParserIT extends ESIntegTestCase {
//see #11120 //see #11120
public void testBooleanParsesFilter() throws Exception { public void testBooleanParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser();
// single clause, serialized as inner object // single clause, serialized as inner object
Query q = boolQuery() Query q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder()) .should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext()); .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
assertThat(q, instanceOf(BooleanQuery.class)); assertThat(q, instanceOf(BooleanQuery.class));
BooleanQuery bq = (BooleanQuery) q; BooleanQuery bq = (BooleanQuery) q;
assertEquals(4, bq.clauses().size()); assertEquals(4, bq.clauses().size());
@ -113,7 +111,7 @@ public class CustomQueryParserIT extends ESIntegTestCase {
.should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder()) .should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder()) .must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder()) .filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext()); .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
assertThat(q, instanceOf(BooleanQuery.class)); assertThat(q, instanceOf(BooleanQuery.class));
bq = (BooleanQuery) q; bq = (BooleanQuery) q;
assertEquals(8, bq.clauses().size()); assertEquals(8, bq.clauses().size());
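The integration test now obtains the QueryShardContext from the IndexService and builds queries against it directly, rather than going through IndexQueryParserService.getShardContext(). A minimal sketch of that access path, using only calls that appear in this hunk:

    IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class);
    QueryShardContext context = indicesService.indexServiceSafe("index").getQueryShardContext();
    Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(context);
    assertThat(((ConstantScoreQuery) q).getQuery(), instanceOf(DummyQueryParserPlugin.DummyQuery.class));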
View File
@ -22,11 +22,9 @@ package org.elasticsearch.index.search;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
@ -39,7 +37,6 @@ import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
public class MultiMatchQueryTests extends ESSingleNodeTestCase { public class MultiMatchQueryTests extends ESSingleNodeTestCase {
private IndexQueryParserService queryParser;
private IndexService indexService; private IndexService indexService;
@Before @Before
@ -64,11 +61,10 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
"}"; "}";
mapperService.merge("person", new CompressedXContent(mapping), true, false); mapperService.merge("person", new CompressedXContent(mapping), true, false);
this.indexService = indexService; this.indexService = indexService;
queryParser = indexService.queryParserService();
} }
public void testCrossFieldMultiMatchQuery() throws IOException { public void testCrossFieldMultiMatchQuery() throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(queryParser); QueryShardContext queryShardContext = indexService.getShard(0).getQueryShardContext();
queryShardContext.setAllowUnmappedFields(true); queryShardContext.setAllowUnmappedFields(true);
Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {
View File
@ -1015,8 +1015,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardRouting routing = new ShardRouting(shard.routingEntry()); ShardRouting routing = new ShardRouting(shard.routingEntry());
shard.close("simon says", true); shard.close("simon says", true);
IndexServicesProvider indexServices = indexService.getIndexServices(); IndexServicesProvider indexServices = indexService.getIndexServices();
IndexServicesProvider newProvider = new IndexServicesProvider(indexServices.getIndexEventListener(), indexServices.getThreadPool(), indexServices.getMapperService(), indexServices.getQueryParserService(), indexServices.getIndexCache(), indexServices.getIndicesQueryCache(), indexServices.getCodecService(), indexServices.getTermVectorsService(), indexServices.getIndexFieldDataService(), indexServices.getWarmer(), indexServices.getSimilarityService(), indexServices.getFactory(), indexServices.getBigArrays(), indexServices.getIndexingMemoryController()); IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), wrapper, indexServices);
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), wrapper, newProvider);
ShardRoutingHelper.reinit(routing); ShardRoutingHelper.reinit(routing);
newShard.updateRoutingEntry(routing, false); newShard.updateRoutingEntry(routing, false);
DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT); DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
View File
@ -20,11 +20,7 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import java.io.IOException; import java.io.IOException;
import java.util.Locale; import java.util.Locale;
@ -58,15 +54,11 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
@Override @Override
public SuggestContextParser getContextParser() { public SuggestContextParser getContextParser() {
return new SuggestContextParser() { return (parser, mapperService, headersContext) -> {
@Override Map<String, Object> options = parser.map();
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException { suggestionContext.setField((String) options.get("field"));
Map<String, Object> options = parser.map(); return suggestionContext;
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
suggestionContext.setField((String) options.get("field"));
return suggestionContext;
}
}; };
} }
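Dropping the IndexQueryParserService parameter leaves SuggestContextParser with three arguments, which is what lets the anonymous class above collapse into a lambda. A sketch of the resulting parser shape, kept to what this hunk shows:

    public SuggestContextParser getContextParser() {
        // (parser, mapperService, headersContext) is the new three-argument signature
        return (parser, mapperService, headersContext) -> {
            Map<String, Object> options = parser.map();
            CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
            suggestionContext.setField((String) options.get("field"));
            return suggestionContext;
        };
    }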
View File
@ -41,7 +41,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
@ -311,14 +310,7 @@ public class TestSearchContext extends SearchContext {
} }
@Override @Override
public AnalysisService analysisService() { public AnalysisService analysisService() { return indexService.analysisService();}
return indexService.analysisService();
}
@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
@Override @Override
public SimilarityService similarityService() { public SimilarityService similarityService() {