Clean up query parsing and remove IndexQueryParserService
IndexQueryParserService is only a factory for QueryShardContext instances, which are not even bound to a shard. The service merely forwards dependencies and even references node-level services directly, which makes separating dependencies at the shard, index, and node level hard. This commit removes the service entirely, folds the creation of QueryShardContext into IndexShard, which is its logical place, and detaches the ClusterService needed for index-name matching during query parsing by using a simple predicate interface on IndexSettings.
parent b56bbf62dd
commit 4176964358
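To make the new wiring concrete, here is a minimal sketch of the call pattern after this change. The type and method names (IndexShard#getQueryShardContext, QueryShardContext#toQuery, IndexSettings#getDefaultField, IndexSettings#isMatchIndexName) are taken from the diff below; the surrounding request and service plumbing is assumed for illustration only and is not part of the commit.

    // Callers no longer go through IndexQueryParserService; they ask the shard
    // (or the IndexService) for a QueryShardContext and parse against it directly.
    IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
    IndexShard indexShard = indexService.getShard(shardId.id());
    QueryShardContext context = indexShard.getQueryShardContext();
    ParsedQuery parsed = context.toQuery(queryBuilder);

    // Per-index query defaults now live on IndexSettings instead of the parser service.
    String defaultField = indexService.getIndexSettings().getDefaultField();

    // Index-name matching during query parsing is expressed as a Predicate<String>
    // handed to IndexSettings, so parsing no longer touches the node-level ClusterService.
    boolean matches = indexService.getIndexSettings().isMatchIndexName("my-index-or-alias");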
@@ -119,7 +119,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
         }
         if (field == null) {
             if (indexService != null) {
-                field = indexService.queryParserService().defaultField();
+                field = indexService.getIndexSettings().getDefaultField();
             } else {
                 field = AllFieldMapper.NAME;
             }

@@ -42,7 +42,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
-import org.elasticsearch.index.query.IndexQueryParserService;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;

@@ -162,8 +162,8 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
     @Override
     protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
-        IndexQueryParserService queryParserService = indexService.queryParserService();
         IndexShard indexShard = indexService.getShard(request.shardId().id());
+        QueryShardContext queryParserService = indexShard.getQueryShardContext();

         boolean valid;
         String explanation = null;

@@ -121,7 +121,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
         SearchContext.setCurrent(context);

         try {
-            context.parsedQuery(indexService.queryParserService().toQuery(request.query()));
+            context.parsedQuery(indexShard.getQueryShardContext().toQuery(request.query()));
             context.preProcess();
             int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
             Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);

@@ -142,8 +142,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
                 if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                     throw new IllegalArgumentException("suggest content missing");
                 }
-                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
-                        indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id(), request);
+                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.shardId().getIndex(), request.shardId().id(), request);
                 final Suggest result = suggestPhase.execute(context, searcher.searcher());
                 return new ShardSuggestResponse(request.shardId(), result);
             }

@@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.indices.InvalidAliasNameException;

@@ -113,14 +112,14 @@ public class AliasValidator extends AbstractComponent {

     /**
      * Validates an alias filter by parsing it using the
-     * provided {@link org.elasticsearch.index.query.IndexQueryParserService}
+     * provided {@link org.elasticsearch.index.query.QueryShardContext}
      * @throws IllegalArgumentException if the filter is not valid
      */
-    public void validateAliasFilter(String alias, String filter, IndexQueryParserService indexQueryParserService) {
-        assert indexQueryParserService != null;
+    public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) {
+        assert queryShardContext != null;
         try {
             XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
-            validateAliasFilter(parser, indexQueryParserService);
+            validateAliasFilter(parser, queryShardContext);
         } catch (Throwable e) {
             throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
         }

@@ -128,26 +127,25 @@ public class AliasValidator extends AbstractComponent {

     /**
      * Validates an alias filter by parsing it using the
-     * provided {@link org.elasticsearch.index.query.IndexQueryParserService}
+     * provided {@link org.elasticsearch.index.query.QueryShardContext}
      * @throws IllegalArgumentException if the filter is not valid
      */
-    public void validateAliasFilter(String alias, byte[] filter, IndexQueryParserService indexQueryParserService) {
-        assert indexQueryParserService != null;
+    public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) {
+        assert queryShardContext != null;
         try {
             XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
-            validateAliasFilter(parser, indexQueryParserService);
+            validateAliasFilter(parser, queryShardContext);
         } catch (Throwable e) {
             throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
         }
     }

-    private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
-        QueryShardContext context = indexQueryParserService.getShardContext();
+    private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
         try {
-            context.reset(parser);
-            context.parseContext().parseInnerQueryBuilder().toFilter(context);
+            queryShardContext.reset(parser);
+            queryShardContext.parseContext().parseInnerQueryBuilder().toFilter(queryShardContext);
         } finally {
-            context.reset(null);
+            queryShardContext.reset(null);
             parser.close();
         }
     }

@@ -806,4 +806,17 @@ public class IndexNameExpressionResolver extends AbstractComponent {
         }
     }

+    /**
+     * Returns <code>true</code> iff the given expression resolves to the given index name otherwise <code>false</code>
+     */
+    public final boolean matchesIndex(String indexName, String expression, ClusterState state) {
+        final String[] concreteIndices = concreteIndices(state, IndicesOptions.lenientExpandOpen(), expression);
+        for (String index : concreteIndices) {
+            if (Regex.simpleMatch(index, indexName)) {
+                return true;
+            }
+        }
+        return indexName.equals(expression);
+    }
+
 }

@ -26,7 +26,6 @@ import org.apache.lucene.util.CollectionUtil;
|
|||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
import org.elasticsearch.action.admin.indices.alias.Alias;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
|
||||
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
|
||||
|
@ -41,48 +40,37 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
|
|||
import org.elasticsearch.cluster.routing.RoutingTable;
|
||||
import org.elasticsearch.cluster.routing.allocation.AllocationService;
|
||||
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.ValidationException;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.indices.IndexAlreadyExistsException;
|
||||
import org.elasticsearch.indices.IndexCreationException;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.indices.InvalidIndexNameException;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;
|
||||
|
@ -334,15 +322,15 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
}
|
||||
}
|
||||
|
||||
IndexQueryParserService indexQueryParserService = indexService.queryParserService();
|
||||
QueryShardContext queryShardContext = indexService.getQueryShardContext();
|
||||
for (Alias alias : request.aliases()) {
|
||||
if (Strings.hasLength(alias.filter())) {
|
||||
aliasValidator.validateAliasFilter(alias.name(), alias.filter(), indexQueryParserService);
|
||||
aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
|
||||
}
|
||||
}
|
||||
for (AliasMetaData aliasMetaData : templatesAliases.values()) {
|
||||
if (aliasMetaData.filter() != null) {
|
||||
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), indexQueryParserService);
|
||||
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -112,7 +112,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
                         indices.put(indexMetaData.getIndex(), indexService);
                     }

-                    aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.queryParserService());
+                    aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext());
                 }
                 AliasMetaData newAliasMd = AliasMetaData.newAliasMetaDataBuilder(
                         aliasAction.alias())

@@ -20,23 +20,22 @@
 package org.elasticsearch.common;

 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.query.IndexQueryParserService;

 import java.util.EnumSet;

 /**
  * Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField}
- * against a field name and throw deprecation exception depending on the current value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
+ * against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting.
  */
 public class ParseFieldMatcher {
-
+    public static final String PARSE_STRICT = "index.query.parse.strict";
     public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS);
     public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS);

     private final EnumSet<ParseField.Flag> parseFlags;

     public ParseFieldMatcher(Settings settings) {
-        if (settings.getAsBoolean(IndexQueryParserService.PARSE_STRICT, false)) {
+        if (settings.getAsBoolean(PARSE_STRICT, false)) {
             this.parseFlags = EnumSet.of(ParseField.Flag.STRICT);
         } else {
             this.parseFlags = ParseField.EMPTY_FLAGS;

@@ -49,7 +48,7 @@ public class ParseFieldMatcher {

     /**
      * Matches a {@link ParseField} against a field name, and throws deprecation exception depending on the current
-     * value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
+     * value of the {@link #PARSE_STRICT} setting.
      * @param fieldName the field name found in the request while parsing
      * @param parseField the parse field that we are looking for
     * @throws IllegalArgumentException whenever we are in strict mode and the request contained a deprecated field

@@ -222,8 +222,9 @@ public final class IndexModule extends AbstractModule {

     @Override
     protected void configure() {
+        final IndexSettings settings = indexSettings.newWithListener(settingsConsumers);
         try {
-            bind(AnalysisService.class).toInstance(analysisRegistry.build(indexSettings));
+            bind(AnalysisService.class).toInstance(analysisRegistry.build(settings));
         } catch (IOException e) {
             throw new ElasticsearchException("can't create analysis service", e);
         }

@@ -234,7 +235,6 @@ public final class IndexModule extends AbstractModule {
         bind(IndexServicesProvider.class).asEagerSingleton();
         bind(MapperService.class).asEagerSingleton();
         bind(IndexFieldDataService.class).asEagerSingleton();
-        final IndexSettings settings = new IndexSettings(indexSettings.getIndexMetaData(), indexSettings.getNodeSettings(), settingsConsumers);
         bind(IndexSettings.class).toInstance(settings);

         final String storeType = settings.getSettings().get(STORE_TYPE);

@ -43,8 +43,8 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.*;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.index.store.IndexStore;
|
||||
|
@ -164,10 +164,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
return indexServicesProvider.getMapperService();
|
||||
}
|
||||
|
||||
public IndexQueryParserService queryParserService() {
|
||||
return indexServicesProvider.getQueryParserService();
|
||||
}
|
||||
|
||||
public SimilarityService similarityService() {
|
||||
return indexServicesProvider.getSimilarityService();
|
||||
}
|
||||
|
@ -362,6 +358,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
return indexSettings;
|
||||
}
|
||||
|
||||
public QueryShardContext getQueryShardContext() {
|
||||
return new QueryShardContext(indexSettings, indexServicesProvider.getClient(), bitsetFilterCache(), indexServicesProvider.getIndexFieldDataService(), mapperService(), similarityService(), indexServicesProvider.getScriptService(), indexServicesProvider.getIndicesQueriesRegistry());
|
||||
}
|
||||
|
||||
private class StoreCloseListener implements Store.OnClose {
|
||||
private final ShardId shardId;
|
||||
private final boolean ownsShard;
|
||||
|
@ -452,11 +452,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
* The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
|
||||
* Returns <tt>null</tt> if no filtering is required.</p>
|
||||
*/
|
||||
public Query aliasFilter(String... aliasNames) {
|
||||
public Query aliasFilter(QueryShardContext context, String... aliasNames) {
|
||||
if (aliasNames == null || aliasNames.length == 0) {
|
||||
return null;
|
||||
}
|
||||
final IndexQueryParserService indexQueryParser = queryParserService();
|
||||
final ImmutableOpenMap<String, AliasMetaData> aliases = indexSettings.getIndexMetaData().getAliases();
|
||||
if (aliasNames.length == 1) {
|
||||
AliasMetaData alias = aliases.get(aliasNames[0]);
|
||||
|
@ -464,7 +463,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
// This shouldn't happen unless alias disappeared after filteringAliases was called.
|
||||
throw new InvalidAliasNameException(index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
}
|
||||
return parse(alias, indexQueryParser);
|
||||
return parse(alias, context);
|
||||
} else {
|
||||
// we need to bench here a bit, to see maybe it makes sense to use OrFilter
|
||||
BooleanQuery.Builder combined = new BooleanQuery.Builder();
|
||||
|
@ -472,9 +471,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
AliasMetaData alias = aliases.get(aliasName);
|
||||
if (alias == null) {
|
||||
// This shouldn't happen unless alias disappeared after filteringAliases was called.
|
||||
throw new InvalidAliasNameException(indexQueryParser.index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
throw new InvalidAliasNameException(indexSettings.getIndex(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
}
|
||||
Query parsedFilter = parse(alias, indexQueryParser);
|
||||
Query parsedFilter = parse(alias, context);
|
||||
if (parsedFilter != null) {
|
||||
combined.add(parsedFilter, BooleanClause.Occur.SHOULD);
|
||||
} else {
|
||||
|
@ -486,18 +485,18 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
}
|
||||
}
|
||||
|
||||
private Query parse(AliasMetaData alias, IndexQueryParserService indexQueryParser) {
|
||||
private Query parse(AliasMetaData alias, QueryShardContext parseContext) {
|
||||
if (alias.filter() == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
byte[] filterSource = alias.filter().uncompressed();
|
||||
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
|
||||
ParsedQuery parsedFilter = indexQueryParser.parseInnerFilter(parser);
|
||||
ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser);
|
||||
return parsedFilter == null ? null : parsedFilter.query();
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new AliasFilterParsingException(indexQueryParser.index(), alias.getAlias(), "Invalid alias filter", ex);
|
||||
throw new AliasFilterParsingException(parseContext.index(), alias.getAlias(), "Invalid alias filter", ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
|
@ -27,14 +28,14 @@ import org.elasticsearch.index.codec.CodecService;
|
|||
import org.elasticsearch.index.engine.EngineFactory;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.indices.IndicesWarmer;
|
||||
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
|
||||
import org.elasticsearch.indices.memory.IndexingMemoryController;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
/**
|
||||
|
@ -46,7 +47,6 @@ public final class IndexServicesProvider {
|
|||
|
||||
private final ThreadPool threadPool;
|
||||
private final MapperService mapperService;
|
||||
private final IndexQueryParserService queryParserService;
|
||||
private final IndexCache indexCache;
|
||||
private final IndicesQueryCache indicesQueryCache;
|
||||
private final CodecService codecService;
|
||||
|
@ -58,13 +58,15 @@ public final class IndexServicesProvider {
|
|||
private final BigArrays bigArrays;
|
||||
private final IndexingMemoryController indexingMemoryController;
|
||||
private final IndexEventListener listener;
|
||||
private final Client client;
|
||||
private final IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
private final ScriptService scriptService;
|
||||
|
||||
@Inject
|
||||
public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController) {
|
||||
public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController, Client client, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry) {
|
||||
this.listener = listener;
|
||||
this.threadPool = threadPool;
|
||||
this.mapperService = mapperService;
|
||||
this.queryParserService = queryParserService;
|
||||
this.indexCache = indexCache;
|
||||
this.indicesQueryCache = indicesQueryCache;
|
||||
this.codecService = codecService;
|
||||
|
@ -75,6 +77,9 @@ public final class IndexServicesProvider {
|
|||
this.factory = factory;
|
||||
this.bigArrays = bigArrays;
|
||||
this.indexingMemoryController = indexingMemoryController;
|
||||
this.client = client;
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
this.scriptService = scriptService;
|
||||
}
|
||||
|
||||
public IndexEventListener getIndexEventListener() {
|
||||
|
@ -88,10 +93,6 @@ public final class IndexServicesProvider {
|
|||
return mapperService;
|
||||
}
|
||||
|
||||
public IndexQueryParserService getQueryParserService() {
|
||||
return queryParserService;
|
||||
}
|
||||
|
||||
public IndexCache getIndexCache() {
|
||||
return indexCache;
|
||||
}
|
||||
|
@ -126,6 +127,18 @@ public final class IndexServicesProvider {
|
|||
|
||||
public BigArrays getBigArrays() { return bigArrays; }
|
||||
|
||||
public Client getClient() {
|
||||
return client;
|
||||
}
|
||||
|
||||
public IndicesQueriesRegistry getIndicesQueriesRegistry() {
|
||||
return indicesQueriesRegistry;
|
||||
}
|
||||
|
||||
public ScriptService getScriptService() {
|
||||
return scriptService;
|
||||
}
|
||||
|
||||
public IndexingMemoryController getIndexingMemoryController() {
|
||||
return indexingMemoryController;
|
||||
}
|
||||
|
|
|
@ -20,16 +20,20 @@ package org.elasticsearch.index;
|
|||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
* This class encapsulates all index level settings and handles settings updates.
|
||||
|
@ -39,6 +43,12 @@ import java.util.function.Consumer;
|
|||
* be called for each settings update.
|
||||
*/
|
||||
public final class IndexSettings {
|
||||
|
||||
public static final String DEFAULT_FIELD = "index.query.default_field";
|
||||
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
|
||||
public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
|
||||
public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
|
||||
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
|
||||
private final String uuid;
|
||||
private final List<Consumer<Settings>> updateListeners;
|
||||
private final Index index;
|
||||
|
@ -48,10 +58,51 @@ public final class IndexSettings {
|
|||
private final Settings nodeSettings;
|
||||
private final int numberOfShards;
|
||||
private final boolean isShadowReplicaIndex;
|
||||
private final ParseFieldMatcher parseFieldMatcher;
|
||||
// volatile fields are updated via #updateIndexMetaData(IndexMetaData) under lock
|
||||
private volatile Settings settings;
|
||||
private volatile IndexMetaData indexMetaData;
|
||||
private final String defaultField;
|
||||
private final boolean queryStringLenient;
|
||||
private final boolean queryStringAnalyzeWildcard;
|
||||
private final boolean queryStringAllowLeadingWildcard;
|
||||
private final boolean defaultAllowUnmappedFields;
|
||||
private final Predicate<String> indexNameMatcher;
|
||||
|
||||
/**
|
||||
* Returns the default search field for this index.
|
||||
*/
|
||||
public String getDefaultField() {
|
||||
return defaultField;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if query string parsing should be lenient. The default is <code>false</code>
|
||||
*/
|
||||
public boolean isQueryStringLenient() {
|
||||
return queryStringLenient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if the query string should analyze wildcards. The default is <code>false</code>
|
||||
*/
|
||||
public boolean isQueryStringAnalyzeWildcard() {
|
||||
return queryStringAnalyzeWildcard;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if the query string parser should allow leading wildcards. The default is <code>true</code>
|
||||
*/
|
||||
public boolean isQueryStringAllowLeadingWildcard() {
|
||||
return queryStringAllowLeadingWildcard;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if queries should be lenient about unmapped fields. The default is <code>true</code>
|
||||
*/
|
||||
public boolean isDefaultAllowUnmappedFields() {
|
||||
return defaultAllowUnmappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
|
||||
|
@ -62,6 +113,19 @@ public final class IndexSettings {
|
|||
* @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
|
||||
*/
|
||||
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners) {
|
||||
this(indexMetaData, nodeSettings, updateListeners, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
|
||||
* while index level settings will overwrite node settings.
|
||||
*
|
||||
* @param indexMetaData the index metadata this settings object is associated with
|
||||
* @param nodeSettings the nodes settings this index is allocated on.
|
||||
* @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
|
||||
* @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias
|
||||
*/
|
||||
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners, final Predicate<String> indexNameMatcher) {
|
||||
this.nodeSettings = nodeSettings;
|
||||
this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build();
|
||||
this.updateListeners = Collections.unmodifiableList(new ArrayList<>(updateListeners));
|
||||
|
@ -73,6 +137,25 @@ public final class IndexSettings {
|
|||
this.indexMetaData = indexMetaData;
|
||||
numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
|
||||
isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings);
|
||||
|
||||
this.defaultField = settings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
|
||||
this.queryStringLenient = settings.getAsBoolean(QUERY_STRING_LENIENT, false);
|
||||
this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
|
||||
this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
|
||||
this.parseFieldMatcher = new ParseFieldMatcher(settings);
|
||||
this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true);
|
||||
this.indexNameMatcher = indexNameMatcher;
|
||||
assert indexNameMatcher.test(indexMetaData.getIndex());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance adding the given listeners to the settings
|
||||
*/
|
||||
IndexSettings newWithListener(final Collection<Consumer<Settings>> updateListeners) {
|
||||
ArrayList<Consumer<Settings>> newUpdateListeners = new ArrayList<>(updateListeners);
|
||||
newUpdateListeners.addAll(this.updateListeners);
|
||||
return new IndexSettings(indexMetaData, nodeSettings, newUpdateListeners, indexNameMatcher);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -138,9 +221,7 @@ public final class IndexSettings {
|
|||
* Returns <code>true</code> iff this index uses shadow replicas.
|
||||
* @see IndexMetaData#isIndexUsingShadowReplicas(Settings)
|
||||
*/
|
||||
public boolean isShadowReplicaIndex() {
|
||||
return isShadowReplicaIndex;
|
||||
}
|
||||
public boolean isShadowReplicaIndex() { return isShadowReplicaIndex; }
|
||||
|
||||
/**
|
||||
* Returns the node settings. The settings retured from {@link #getSettings()} are a merged version of the
|
||||
|
@ -150,6 +231,14 @@ public final class IndexSettings {
|
|||
return nodeSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link ParseFieldMatcher} for this index.
|
||||
*/
|
||||
public ParseFieldMatcher getParseFieldMatcher() { return parseFieldMatcher; }
|
||||
|
||||
public boolean isMatchIndexName(String expression) {
|
||||
return indexNameMatcher.test(expression);
|
||||
}
|
||||
/**
|
||||
* Updates the settings and index metadata and notifies all registered settings consumers with the new settings iff at least one setting has changed.
|
||||
*
|
||||
|
|
|
@@ -220,8 +220,8 @@ public class ParentFieldMapper extends MetadataFieldMapper {
             return super.termsQuery(values, context);
         }

-        List<String> types = new ArrayList<>(context.mapperService().types().size());
-        for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
+        List<String> types = new ArrayList<>(context.getMapperService().types().size());
+        for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
             if (!documentMapper.parentFieldMapper().active()) {
                 types.add(documentMapper.type());
             }

@ -44,7 +44,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
|
|||
import org.elasticsearch.index.mapper.DocumentTypeListener;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
@ -69,7 +68,6 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
|
|||
public final String MAP_UNMAPPED_FIELDS_AS_STRING = "index.percolator.map_unmapped_fields_as_string";
|
||||
|
||||
// This is a shard level service, but these below are index level service:
|
||||
private final IndexQueryParserService queryParserService;
|
||||
private final MapperService mapperService;
|
||||
private final IndexFieldDataService indexFieldDataService;
|
||||
|
||||
|
@ -79,18 +77,20 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
|
|||
private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener();
|
||||
private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
|
||||
private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false);
|
||||
private final QueryShardContext queryShardContext;
|
||||
private boolean mapUnmappedFieldsAsString;
|
||||
private final MeanMetric percolateMetric = new MeanMetric();
|
||||
private final CounterMetric currentMetric = new CounterMetric();
|
||||
private final CounterMetric numberOfQueries = new CounterMetric();
|
||||
|
||||
public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, IndexQueryParserService queryParserService,
|
||||
public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings,
|
||||
ShardIndexingService indexingService, MapperService mapperService,
|
||||
QueryShardContext queryShardContext,
|
||||
IndexFieldDataService indexFieldDataService) {
|
||||
super(shardId, indexSettings);
|
||||
this.queryParserService = queryParserService;
|
||||
this.mapperService = mapperService;
|
||||
this.indexingService = indexingService;
|
||||
this.queryShardContext = queryShardContext;
|
||||
this.indexFieldDataService = indexFieldDataService;
|
||||
this.mapUnmappedFieldsAsString = this.indexSettings.getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false);
|
||||
mapperService.addTypeListener(percolateTypeListener);
|
||||
|
@ -179,7 +179,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
|
|||
if (type != null) {
|
||||
previousTypes = QueryShardContext.setTypesWithPrevious(type);
|
||||
}
|
||||
QueryShardContext context = queryParserService.getShardContext();
|
||||
QueryShardContext context = queryShardContext.clone();
|
||||
try {
|
||||
context.reset(parser);
|
||||
// This means that fields in the query need to exist in the mapping prior to registering this query
|
||||
|
@ -196,7 +196,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
|
|||
// as an analyzed string.
|
||||
context.setAllowUnmappedFields(false);
|
||||
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
|
||||
return queryParserService.parseInnerQuery(context);
|
||||
return context.parseInnerQuery();
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
|
||||
} finally {
|
||||
|
|
|
@@ -245,10 +245,10 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
             if (fieldType != null) {
                 analyzerObj = context.getSearchAnalyzer(fieldType);
             } else {
-                analyzerObj = context.mapperService().searchAnalyzer();
+                analyzerObj = context.getMapperService().searchAnalyzer();
             }
         } else {
-            analyzerObj = context.mapperService().analysisService().analyzer(analyzer);
+            analyzerObj = context.getMapperService().analysisService().analyzer(analyzer);
             if (analyzerObj == null) {
                 throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
             }

@@ -72,7 +72,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
     }

     public static Query newFilter(QueryShardContext context, String fieldPattern) {
-        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME);
+        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
         if (fieldNamesFieldType == null) {
             // can only happen when no types exist, so no docs exist either
             return Queries.newMatchNoDocsQuery();

@ -217,7 +217,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
|||
}
|
||||
innerQuery.setBoost(boost);
|
||||
|
||||
DocumentMapper childDocMapper = context.mapperService().documentMapper(type);
|
||||
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
|
||||
if (childDocMapper == null) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
|
||||
}
|
||||
|
@ -231,10 +231,10 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
|||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalStateException("start object expected but was: [" + token + "]");
|
||||
}
|
||||
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
|
||||
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
|
||||
if (innerHits != null) {
|
||||
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
|
||||
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
|
||||
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), childDocMapper);
|
||||
String name = innerHits.getName() != null ? innerHits.getName() : type;
|
||||
context.addInnerHits(name, parentChildInnerHits);
|
||||
}
|
||||
|
@ -242,7 +242,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
|||
}
|
||||
|
||||
String parentType = parentFieldMapper.type();
|
||||
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
|
||||
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(parentType);
|
||||
if (parentDocMapper == null) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type ["
|
||||
+ parentType + "]");
|
||||
|
|
|
@ -130,7 +130,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
|||
return null;
|
||||
}
|
||||
innerQuery.setBoost(boost);
|
||||
DocumentMapper parentDocMapper = context.mapperService().documentMapper(type);
|
||||
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(type);
|
||||
if (parentDocMapper == null) {
|
||||
throw new QueryShardException(context, "[has_parent] query configured 'parent_type' [" + type
|
||||
+ "] is not a valid type");
|
||||
|
@ -142,10 +142,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
|||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalStateException("start object expected but was: [" + token + "]");
|
||||
}
|
||||
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
|
||||
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
|
||||
if (innerHits != null) {
|
||||
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
|
||||
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), parentDocMapper);
|
||||
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), parentDocMapper);
|
||||
String name = innerHits.getName() != null ? innerHits.getName() : type;
|
||||
context.addInnerHits(name, parentChildInnerHits);
|
||||
}
|
||||
|
@ -155,10 +155,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
|||
Set<String> parentTypes = new HashSet<>(5);
|
||||
parentTypes.add(parentDocMapper.type());
|
||||
ParentChildIndexFieldData parentChildIndexFieldData = null;
|
||||
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
|
||||
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper.active()) {
|
||||
DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type());
|
||||
DocumentMapper parentTypeDocumentMapper = context.getMapperService().documentMapper(parentFieldMapper.type());
|
||||
parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
|
||||
if (parentTypeDocumentMapper == null) {
|
||||
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
|
||||
|
@ -172,14 +172,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
|||
|
||||
Query parentTypeQuery = null;
|
||||
if (parentTypes.size() == 1) {
|
||||
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next());
|
||||
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypes.iterator().next());
|
||||
if (documentMapper != null) {
|
||||
parentTypeQuery = documentMapper.typeFilter();
|
||||
}
|
||||
} else {
|
||||
BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
|
||||
for (String parentTypeStr : parentTypes) {
|
||||
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
|
||||
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypeStr);
|
||||
if (documentMapper != null) {
|
||||
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
|
|
|
@@ -115,7 +115,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
         if (types.length == 0) {
             typesForQuery = context.queryTypes();
         } else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
-            typesForQuery = context.mapperService().types();
+            typesForQuery = context.getMapperService().types();
         } else {
             typesForQuery = new HashSet<>();
             Collections.addAll(typesForQuery, types);

@ -1,272 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.CloseableThreadLocal;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AnalysisService;
|
||||
import org.elasticsearch.index.cache.IndexCache;
|
||||
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class IndexQueryParserService extends AbstractIndexComponent {
|
||||
|
||||
public static final String DEFAULT_FIELD = "index.query.default_field";
|
||||
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
|
||||
public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
|
||||
public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
|
||||
public static final String PARSE_STRICT = "index.query.parse.strict";
|
||||
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
|
||||
private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;
|
||||
|
||||
private CloseableThreadLocal<QueryShardContext> cache = new CloseableThreadLocal<QueryShardContext>() {
|
||||
@Override
|
||||
protected QueryShardContext initialValue() {
|
||||
return new QueryShardContext(IndexQueryParserService.this);
|
||||
}
|
||||
};
|
||||
|
||||
final AnalysisService analysisService;
|
||||
|
||||
final ScriptService scriptService;
|
||||
|
||||
final MapperService mapperService;
|
||||
|
||||
final SimilarityService similarityService;
|
||||
|
||||
final IndexCache indexCache;
|
||||
|
||||
protected IndexFieldDataService fieldDataService;
|
||||
|
||||
final ClusterService clusterService;
|
||||
|
||||
final IndexNameExpressionResolver indexNameExpressionResolver;
|
||||
|
||||
final BitsetFilterCache bitsetFilterCache;
|
||||
|
||||
private final IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
|
||||
private final String defaultField;
|
||||
private final boolean queryStringLenient;
|
||||
private final boolean queryStringAnalyzeWildcard;
|
||||
private final boolean queryStringAllowLeadingWildcard;
|
||||
private final ParseFieldMatcher parseFieldMatcher;
|
||||
private final boolean defaultAllowUnmappedFields;
|
||||
private final Client client;
|
||||
|
||||
@Inject
|
||||
public IndexQueryParserService(IndexSettings indexSettings,
|
||||
IndicesQueriesRegistry indicesQueriesRegistry,
|
||||
ScriptService scriptService, AnalysisService analysisService,
|
||||
MapperService mapperService, IndexCache indexCache, IndexFieldDataService fieldDataService,
|
||||
BitsetFilterCache bitsetFilterCache,
|
||||
@Nullable SimilarityService similarityService, ClusterService clusterService,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
InnerHitsQueryParserHelper innerHitsQueryParserHelper, Client client) {
|
||||
super(indexSettings);
|
||||
this.scriptService = scriptService;
|
||||
this.analysisService = analysisService;
|
||||
this.mapperService = mapperService;
|
||||
this.similarityService = similarityService;
|
||||
this.indexCache = indexCache;
|
||||
this.fieldDataService = fieldDataService;
|
||||
this.bitsetFilterCache = bitsetFilterCache;
|
||||
this.clusterService = clusterService;
|
||||
this.indexNameExpressionResolver = indexNameExpressionResolver;
|
||||
|
||||
this.defaultField = this.indexSettings.getSettings().get(DEFAULT_FIELD, AllFieldMapper.NAME);
|
||||
this.queryStringLenient = this.indexSettings.getSettings().getAsBoolean(QUERY_STRING_LENIENT, false);
|
||||
this.queryStringAnalyzeWildcard = indexSettings.getSettings().getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
|
||||
this.queryStringAllowLeadingWildcard = indexSettings.getSettings().getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
|
||||
this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings.getSettings());
|
||||
this.defaultAllowUnmappedFields = this.indexSettings.getSettings().getAsBoolean(ALLOW_UNMAPPED, true);
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
public void close() {
|
||||
cache.close();
|
||||
}
|
||||
|
||||
public String defaultField() {
|
||||
return this.defaultField;
|
||||
}
|
||||
|
||||
public boolean queryStringAnalyzeWildcard() {
|
||||
return this.queryStringAnalyzeWildcard;
|
||||
}
|
||||
|
||||
public boolean queryStringAllowLeadingWildcard() {
|
||||
return this.queryStringAllowLeadingWildcard;
|
||||
}
|
||||
|
||||
public boolean queryStringLenient() {
|
||||
return this.queryStringLenient;
|
||||
}
|
||||
|
||||
public IndicesQueriesRegistry indicesQueriesRegistry() {
|
||||
return indicesQueriesRegistry;
|
||||
}
|
||||
|
||||
public ParsedQuery parse(BytesReference source) {
|
||||
QueryShardContext context = cache.get();
|
||||
XContentParser parser = null;
|
||||
try {
|
||||
parser = XContentFactory.xContent(source).createParser(source);
|
||||
return innerParse(context, parser);
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public ParsedQuery parse(XContentParser parser) {
|
||||
try {
|
||||
return innerParse(cache.get(), parser);
|
||||
} catch(IOException e) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses an inner filter, returning null if the filter should be ignored.
|
||||
*/
|
||||
@Nullable
|
||||
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
|
||||
QueryShardContext context = cache.get();
|
||||
context.reset(parser);
|
||||
try {
|
||||
context.parseFieldMatcher(parseFieldMatcher);
|
||||
Query filter = context.parseContext().parseInnerQueryBuilder().toFilter(context);
|
||||
if (filter == null) {
|
||||
return null;
|
||||
}
|
||||
return new ParsedQuery(filter, context.copyNamedQueries());
|
||||
} finally {
|
||||
context.reset(null);
|
||||
}
|
||||
}
|
||||
|
||||
public QueryShardContext getShardContext() {
|
||||
return cache.get();
|
||||
}
|
||||
|
||||
public boolean defaultAllowUnmappedFields() {
|
||||
return defaultAllowUnmappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The lowest node version in the cluster when the index was created or <code>null</code> if that was unknown
|
||||
*/
|
||||
public Version getIndexCreatedVersion() {
|
||||
return indexSettings.getIndexVersionCreated();
|
||||
}
|
||||
|
||||
private ParsedQuery innerParse(QueryShardContext context, XContentParser parser) throws IOException, QueryShardException {
|
||||
context.reset(parser);
|
||||
try {
|
||||
context.parseFieldMatcher(parseFieldMatcher);
|
||||
Query query = parseInnerQuery(context);
|
||||
return new ParsedQuery(query, context.copyNamedQueries());
|
||||
} finally {
|
||||
context.reset(null);
|
||||
}
|
||||
}
|
||||
|
||||
public Query parseInnerQuery(QueryShardContext context) throws IOException {
|
||||
return toQuery(context.parseContext().parseInnerQueryBuilder(), context);
|
||||
}
|
||||
|
||||
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
|
||||
QueryShardContext context = cache.get();
|
||||
context.reset();
|
||||
context.parseFieldMatcher(parseFieldMatcher);
|
||||
try {
|
||||
Query query = toQuery(queryBuilder, context);
|
||||
return new ParsedQuery(query, context.copyNamedQueries());
|
||||
} catch(QueryShardException | ParsingException e ) {
|
||||
throw e;
|
||||
} catch(Exception e) {
|
||||
throw new QueryShardException(context, "failed to create query: {}", e, queryBuilder);
|
||||
} finally {
|
||||
context.reset();
|
||||
}
|
||||
}
|
||||
|
||||
private static Query toQuery(QueryBuilder<?> queryBuilder, QueryShardContext context) throws IOException {
|
||||
Query query = queryBuilder.toQuery(context);
|
||||
if (query == null) {
|
||||
query = Queries.newMatchNoDocsQuery();
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
||||
public ParseFieldMatcher parseFieldMatcher() {
|
||||
return parseFieldMatcher;
|
||||
}
|
||||
|
||||
public boolean matchesIndices(String... indices) {
|
||||
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterService.state(), IndicesOptions.lenientExpandOpen(), indices);
|
||||
for (String index : concreteIndices) {
|
||||
if (Regex.simpleMatch(index, index().name())) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public InnerHitsQueryParserHelper getInnerHitsQueryParserHelper() {
|
||||
return innerHitsQueryParserHelper;
|
||||
}
|
||||
|
||||
public Client getClient() {
|
||||
return client;
|
||||
}
|
||||
}
|
|
@@ -346,7 +346,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
     @Override
     protected Query doToQuery(QueryShardContext context) throws IOException {
         // validate context specific fields
-        if (analyzer != null && context.analysisService().analyzer(analyzer) == null) {
+        if (analyzer != null && context.getAnalysisService().analyzer(analyzer) == null) {
             throw new QueryShardException(context, "[match] analyzer [" + analyzer + "] not found");
         }

@@ -126,7 +126,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
             throw new QueryShardException(context, "missing must have either existence, or null_value, or both set to true");
         }

-        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.mapperService().fullName(FieldNamesFieldMapper.NAME);
+        final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
         if (fieldNamesFieldType == null) {
             // can only happen when no types exist, so no docs exist either
             return Queries.newMatchNoDocsQuery();

@ -775,7 +775,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
|
|||
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();
|
||||
|
||||
// set similarity
|
||||
mltQuery.setSimilarity(context.searchSimilarity());
|
||||
mltQuery.setSimilarity(context.getSearchSimilarity());
|
||||
|
||||
// set query parameters
|
||||
mltQuery.setMaxQueryTerms(maxQueryTerms);
|
||||
|
@ -796,9 +796,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
|
|||
}
|
||||
|
||||
// set analyzer
|
||||
Analyzer analyzerObj = context.analysisService().analyzer(analyzer);
|
||||
Analyzer analyzerObj = context.getAnalysisService().analyzer(analyzer);
|
||||
if (analyzerObj == null) {
|
||||
analyzerObj = context.mapperService().searchAnalyzer();
|
||||
analyzerObj = context.getMapperService().searchAnalyzer();
|
||||
}
|
||||
mltQuery.setAnalyzer(analyzerObj);
|
||||
|
||||
|
|
|
@ -504,7 +504,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
|
|||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
MultiMatchQuery multiMatchQuery = new MultiMatchQuery(context);
|
||||
if (analyzer != null) {
|
||||
if (context.analysisService().analyzer(analyzer) == null) {
|
||||
if (context.getAnalysisService().analyzer(analyzer) == null) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] analyzer [" + analyzer + "] not found");
|
||||
}
|
||||
multiMatchQuery.setAnalyzer(analyzer);
|
||||
|
@ -539,7 +539,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
|
|||
}
|
||||
}
|
||||
|
||||
Map<String, Float> newFieldsBoosts = handleFieldsMatchPattern(context.mapperService(), fieldsBoosts);
|
||||
Map<String, Float> newFieldsBoosts = handleFieldsMatchPattern(context.getMapperService(), fieldsBoosts);
|
||||
|
||||
Query query = multiMatchQuery.parse(type, newFieldsBoosts, value, minimumShouldMatch);
|
||||
if (query == null) {
|
||||
|
|
|
@ -212,7 +212,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
|||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new IllegalStateException("start object expected but was: [" + token + "]");
|
||||
}
|
||||
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
|
||||
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
|
||||
if (innerHits != null) {
|
||||
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
|
||||
|
||||
|
|
|
@@ -27,14 +27,19 @@ import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;

@@ -42,15 +47,20 @@ import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;

@@ -63,7 +73,14 @@ import static java.util.Collections.unmodifiableMap;
 */
public class QueryShardContext {
private static ThreadLocal<String[]> typesContext = new ThreadLocal<>();
private static final ThreadLocal<String[]> typesContext = new ThreadLocal<>();
private final MapperService mapperService;
private final ScriptService scriptService;
private final SimilarityService similarityService;
private final BitsetFilterCache bitsetFilterCache;
private final IndexFieldDataService indexFieldDataService;
private final IndexSettings indexSettings;
private final Client client;
public static void setTypes(String[] types) {
typesContext.set(types);

@@ -83,28 +100,31 @@ public class QueryShardContext {
typesContext.remove();
}
private final Version indexVersionCreated;
private final IndexQueryParserService indexQueryParser;
private final Map<String, Query> namedQueries = new HashMap<>();
private final MapperQueryParser queryParser = new MapperQueryParser(this);
private final IndicesQueriesRegistry indicesQueriesRegistry;
private boolean allowUnmappedFields;
private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope;
private QueryParseContext parseContext;
boolean isFilter; // pkg private for testing
boolean isFilter;
public QueryShardContext(IndexSettings indexSettings, Client client, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
final IndicesQueriesRegistry indicesQueriesRegistry) {
this.indexSettings = indexSettings;
this.scriptService = scriptService;
this.client = client;
this.similarityService = similarityService;
this.mapperService = mapperService;
this.bitsetFilterCache = bitsetFilterCache;
this.indexFieldDataService = indexFieldDataService;
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.parseContext = new QueryParseContext(indicesQueriesRegistry);
}
public QueryShardContext(IndexQueryParserService indexQueryParser) {
this.indexVersionCreated = indexQueryParser.getIndexCreatedVersion();
this.indexQueryParser = indexQueryParser;
this.parseContext = new QueryParseContext(indexQueryParser.indicesQueriesRegistry());
public QueryShardContext clone() {
return new QueryShardContext(indexSettings, client, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
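With the service gone, the context takes every dependency explicitly, which also makes it constructible without an injector. A hedged sketch of direct construction, e.g. in a test harness (only the constructor signature and ParseFieldMatcher.EMPTY come from this commit; the wrapper method is an assumption):

import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;

final class ShardContextFactoryExample {
    static QueryShardContext newContext(IndexSettings indexSettings, Client client, BitsetFilterCache bitsetFilterCache,
                                        IndexFieldDataService fieldData, MapperService mapperService,
                                        SimilarityService similarityService, ScriptService scriptService,
                                        IndicesQueriesRegistry queriesRegistry) {
        // All index-level dependencies are passed in directly; nothing is resolved lazily.
        QueryShardContext context = new QueryShardContext(indexSettings, client, bitsetFilterCache,
                fieldData, mapperService, similarityService, scriptService, queriesRegistry);
        context.parseFieldMatcher(ParseFieldMatcher.EMPTY); // same default reset() uses
        return context;
    }
}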
@@ -116,11 +136,12 @@ public class QueryShardContext {
}
public void reset() {
allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields();
allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.parseFieldMatcher(ParseFieldMatcher.EMPTY);
this.lookup = null;
this.namedQueries.clear();
this.nestedScope = new NestedScope();
this.isFilter = false;
}
public void reset(XContentParser jp) {

@@ -129,43 +150,43 @@ public class QueryShardContext {
}
public Index index() {
return this.indexQueryParser.index();
return this.mapperService.getIndexSettings().getIndex();
}
public IndexQueryParserService indexQueryParserService() {
return indexQueryParser;
public InnerHitsSubSearchContext getInnerHitsContext(XContentParser parser) throws IOException {
return InnerHitsQueryParserHelper.parse(parser);
}
public AnalysisService analysisService() {
return indexQueryParser.analysisService;
public AnalysisService getAnalysisService() {
return mapperService.analysisService();
}
public ScriptService scriptService() {
return indexQueryParser.scriptService;
public ScriptService getScriptService() {
return scriptService;
}
public MapperService mapperService() {
return indexQueryParser.mapperService;
public MapperService getMapperService() {
return mapperService;
}
public Similarity searchSimilarity() {
return indexQueryParser.similarityService != null ? indexQueryParser.similarityService.similarity(indexQueryParser.mapperService) : null;
public Similarity getSearchSimilarity() {
return similarityService != null ? similarityService.similarity(mapperService) : null;
}
public String defaultField() {
return indexQueryParser.defaultField();
return indexSettings.getDefaultField();
}
public boolean queryStringLenient() {
return indexQueryParser.queryStringLenient();
return indexSettings.isQueryStringLenient();
}
public boolean queryStringAnalyzeWildcard() {
return indexQueryParser.queryStringAnalyzeWildcard();
return indexSettings.isQueryStringAnalyzeWildcard();
}
public boolean queryStringAllowLeadingWildcard() {
return indexQueryParser.queryStringAllowLeadingWildcard();
return indexSettings.isQueryStringAllowLeadingWildcard();
}
public MapperQueryParser queryParser(QueryParserSettings settings) {

@@ -174,11 +195,11 @@ public class QueryShardContext {
}
public BitSetProducer bitsetFilter(Query filter) {
return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter);
return bitsetFilterCache.getBitSetProducer(filter);
}
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
return indexQueryParser.fieldDataService.getForField(mapper);
return indexFieldDataService.getForField(mapper);
}
public void addNamedQuery(String name, Query query) {

@@ -211,7 +232,7 @@ public class QueryShardContext {
InnerHitsContext innerHitsContext;
if (sc.innerHits() == null) {
innerHitsContext = new InnerHitsContext(new HashMap<String, InnerHitsContext.BaseInnerHits>());
innerHitsContext = new InnerHitsContext(new HashMap<>());
sc.innerHits(innerHitsContext);
} else {
innerHitsContext = sc.innerHits();

@@ -220,15 +241,15 @@ public class QueryShardContext {
}
public Collection<String> simpleMatchToIndexNames(String pattern) {
return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern);
return mapperService.simpleMatchToIndexNames(pattern);
}
public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
return failIfFieldMappingNotFound(name, mapperService.smartNameFieldType(name, getTypes()));
}
public ObjectMapper getObjectMapper(String name) {
return indexQueryParser.mapperService.getObjectMapper(name, getTypes());
return mapperService.getObjectMapper(name, getTypes());
}
/**

@@ -239,7 +260,7 @@ public class QueryShardContext {
if (fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return mapperService().searchAnalyzer();
return getMapperService().searchAnalyzer();
}
/**

@@ -250,7 +271,7 @@ public class QueryShardContext {
if (fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
}
return mapperService().searchQuoteAnalyzer();
return getMapperService().searchQuoteAnalyzer();
}
public void setAllowUnmappedFields(boolean allowUnmappedFields) {

@@ -266,11 +287,9 @@ public class QueryShardContext {
return fieldMapping;
} else if (mapUnmappedFieldAsString) {
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
// it would be better to pass the real index settings, but they are not easily accessible from here...
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
return builder.build(new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath(1))).fieldType();
} else {
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
Version indexCreatedVersion = indexSettings.getIndexVersionCreated();
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
throw new QueryShardException(this, "Strict field resolution and no field mapping can be found for the field with name ["
+ name + "]");

@@ -286,10 +305,10 @@ public class QueryShardContext {
public Collection<String> queryTypes() {
String[] types = getTypes();
if (types == null || types.length == 0) {
return mapperService().types();
return getMapperService().types();
}
if (types.length == 1 && types[0].equals("_all")) {
return mapperService().types();
return getMapperService().types();
}
return Arrays.asList(types);
}

@@ -302,7 +321,7 @@ public class QueryShardContext {
return current.lookup();
}
if (lookup == null) {
lookup = new SearchLookup(mapperService(), indexQueryParser.fieldDataService, null);
lookup = new SearchLookup(getMapperService(), indexFieldDataService, null);
}
return lookup;
}

@@ -320,7 +339,7 @@ public class QueryShardContext {
}
public Version indexVersionCreated() {
return indexVersionCreated;
return indexSettings.getIndexVersionCreated();
}
public QueryParseContext parseContext() {
@@ -328,18 +347,105 @@ public class QueryShardContext {
}
public boolean matchesIndices(String... indices) {
return this.indexQueryParser.matchesIndices(indices);
for (String index : indices) {
if (indexSettings.isMatchIndexName(index)) {
return true;
}
}
return false;
}
/*
 * Executes the given template, and returns the response.
 */
public BytesReference executeQueryTemplate(Template template, SearchContext searchContext) {
ExecutableScript executable = scriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext);
ExecutableScript executable = getScriptService().executable(template, ScriptContext.Standard.SEARCH, searchContext);
return (BytesReference) executable.run();
}
public Client getClient() {
return indexQueryParser.getClient();
return client;
}
public ParsedQuery parse(BytesReference source) {
XContentParser parser = null;
try {
parser = XContentFactory.xContent(source).createParser(source);
return innerParse(parser);
} catch (ParsingException e) {
throw e;
} catch (Exception e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
} finally {
if (parser != null) {
parser.close();
}
}
}
public ParsedQuery parse(XContentParser parser) {
try {
return innerParse(parser);
} catch(IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
}
}
/**
 * Parses an inner filter, returning null if the filter should be ignored.
 */
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
reset(parser);
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query filter = parseContext().parseInnerQueryBuilder().toFilter(this);
if (filter == null) {
return null;
}
return new ParsedQuery(filter, copyNamedQueries());
} finally {
reset(null);
}
}
private ParsedQuery innerParse(XContentParser parser) throws IOException, QueryShardException {
reset(parser);
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query query = parseInnerQuery();
return new ParsedQuery(query, copyNamedQueries());
} finally {
reset(null);
}
}
public Query parseInnerQuery() throws IOException {
return toQuery(this.parseContext().parseInnerQueryBuilder(), this);
}
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
reset();
parseFieldMatcher(indexSettings.getParseFieldMatcher());
try {
Query query = toQuery(queryBuilder, this);
return new ParsedQuery(query, copyNamedQueries());
} catch(QueryShardException | ParsingException e ) {
throw e;
} catch(Exception e) {
throw new QueryShardException(this, "failed to create query: {}", e, queryBuilder);
} finally {
this.reset();
}
}
private static Query toQuery(QueryBuilder<?> queryBuilder, QueryShardContext context) throws IOException {
Query query = queryBuilder.toQuery(context);
if (query == null) {
query = Queries.newMatchNoDocsQuery();
}
return query;
}
}
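Index-name matching is the one piece that used to pull in ClusterService and IndexNameExpressionResolver; it now sits behind IndexSettings.isMatchIndexName(...), backed by the predicate passed in at index creation (see the IndicesService hunk further down). A hedged sketch of a trivial predicate for a test, assuming only the four-argument IndexSettings constructor added by this commit; the Regex-based predicate body is an illustrative assumption, not the production matcher:

import java.util.Collections;
import java.util.function.Predicate;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;

final class IndexNameMatcherExample {
    static IndexSettings newTestIndexSettings(IndexMetaData indexMetaData, Settings nodeSettings) {
        final String indexName = indexMetaData.getIndex();
        // Match the concrete index name or a simple wildcard expression against it.
        Predicate<String> indexNameMatcher = expression -> Regex.simpleMatch(expression, indexName);
        return new IndexSettings(indexMetaData, nodeSettings, Collections.EMPTY_LIST, indexNameMatcher);
    }
}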
@@ -661,7 +661,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
String fieldName = fieldsEntry.getKey();
Float weight = fieldsEntry.getValue();
if (Regex.isSimpleMatchPattern(fieldName)) {
for (String resolvedFieldName : context.mapperService().simpleMatchToIndexNames(fieldName)) {
for (String resolvedFieldName : context.getMapperService().simpleMatchToIndexNames(fieldName)) {
resolvedFields.put(resolvedFieldName, weight);
}
} else {

@@ -672,16 +672,16 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
qpSettings.defaultOperator(defaultOperator.toQueryParserOperator());
if (analyzer == null) {
qpSettings.defaultAnalyzer(context.mapperService().searchAnalyzer());
qpSettings.defaultAnalyzer(context.getMapperService().searchAnalyzer());
} else {
NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(analyzer);
NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(analyzer);
if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] analyzer [" + analyzer + "] not found");
}
qpSettings.forceAnalyzer(namedAnalyzer);
}
if (quoteAnalyzer != null) {
NamedAnalyzer namedAnalyzer = context.analysisService().analyzer(quoteAnalyzer);
NamedAnalyzer namedAnalyzer = context.getAnalysisService().analyzer(quoteAnalyzer);
if (namedAnalyzer == null) {
throw new QueryShardException(context, "[query_string] quote_analyzer [" + quoteAnalyzer + "] not found");
}

@@ -689,7 +689,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
} else if (analyzer != null) {
qpSettings.forceQuoteAnalyzer(qpSettings.analyzer());
} else {
qpSettings.defaultQuoteAnalyzer(context.mapperService().searchQuoteAnalyzer());
qpSettings.defaultQuoteAnalyzer(context.getMapperService().searchQuoteAnalyzer());
}
qpSettings.quoteFieldSuffix(quoteFieldSuffix);

@@ -69,7 +69,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return new ScriptQuery(script, context.scriptService(), context.lookup());
return new ScriptQuery(script, context.getScriptService(), context.lookup());
}
static class ScriptQuery extends Query {

@@ -261,7 +261,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
} else {
for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) {
if (Regex.isSimpleMatchPattern(fieldEntry.getKey())) {
for (String fieldName : context.mapperService().simpleMatchToIndexNames(fieldEntry.getKey())) {
for (String fieldName : context.getMapperService().simpleMatchToIndexNames(fieldEntry.getKey())) {
resolvedFieldsAndWeights.put(fieldName, fieldEntry.getValue());
}
} else {

@@ -273,9 +273,9 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
// Use standard analyzer by default if none specified
Analyzer luceneAnalyzer;
if (analyzer == null) {
luceneAnalyzer = context.mapperService().searchAnalyzer();
luceneAnalyzer = context.getMapperService().searchAnalyzer();
} else {
luceneAnalyzer = context.analysisService().analyzer(analyzer);
luceneAnalyzer = context.getAnalysisService().analyzer(analyzer);
if (luceneAnalyzer == null) {
throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer
+ "] not found");

@@ -102,7 +102,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
protected Query doToQuery(QueryShardContext context) throws IOException {
BytesReference querySource = context.executeQueryTemplate(template, SearchContext.current());
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService());
final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser);
QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy);

@@ -76,7 +76,7 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
protected Query doToQuery(QueryShardContext context) throws IOException {
Query filter;
//LUCENE 4 UPGRADE document mapper should use bytesref as well?
DocumentMapper documentMapper = context.mapperService().documentMapper(type.utf8ToString());
DocumentMapper documentMapper = context.getMapperService().documentMapper(type.utf8ToString());
if (documentMapper == null) {
filter = new TermQuery(new Term(TypeFieldMapper.NAME, type));
} else {

@@ -106,9 +106,9 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(source).createParser(source)) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService());
final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser);
contextCopy.parseFieldMatcher(context.indexQueryParserService().parseFieldMatcher());
contextCopy.parseFieldMatcher(context.parseFieldMatcher());
QueryBuilder<?> result = contextCopy.parseContext().parseInnerQueryBuilder();
context.combineNamedQueries(contextCopy);
return result.toQuery(context);

@@ -148,7 +148,7 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder<FieldV
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
MappedFieldType fieldType = context.mapperService().smartNameFieldType(field);
MappedFieldType fieldType = context.getMapperService().smartNameFieldType(field);
IndexNumericFieldData fieldData = null;
if (fieldType == null) {
if(missing == null) {

@@ -117,7 +117,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
final MappedFieldType fieldType = context.mapperService().smartNameFieldType("_uid");
final MappedFieldType fieldType = context.getMapperService().smartNameFieldType("_uid");
if (fieldType == null) {
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
return new RandomScoreFunction();

@@ -89,7 +89,7 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder<ScriptScore
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
try {
SearchScript searchScript = context.scriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
SearchScript searchScript = context.getScriptService().search(context.lookup(), script, ScriptContext.Standard.SEARCH);
return new ScriptScoreFunction(script, searchScript);
} catch (Exception e) {
throw new QueryShardException(context, "script_score: the script could not be loaded", e);
@@ -19,7 +19,6 @@
package org.elasticsearch.index.query.support;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;

@@ -34,22 +33,15 @@ import java.io.IOException;
public class InnerHitsQueryParserHelper {
private final SortParseElement sortParseElement;
private final FetchSourceParseElement sourceParseElement;
private final HighlighterParseElement highlighterParseElement;
private final ScriptFieldsParseElement scriptFieldsParseElement;
private final FieldDataFieldsParseElement fieldDataFieldsParseElement;
public static final InnerHitsQueryParserHelper INSTANCE = new InnerHitsQueryParserHelper();
@Inject
public InnerHitsQueryParserHelper(SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, ScriptFieldsParseElement scriptFieldsParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement) {
this.sortParseElement = sortParseElement;
this.sourceParseElement = sourceParseElement;
this.highlighterParseElement = highlighterParseElement;
this.scriptFieldsParseElement = scriptFieldsParseElement;
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
}
private static final SortParseElement sortParseElement = new SortParseElement();
private static final FetchSourceParseElement sourceParseElement = new FetchSourceParseElement();
private static final HighlighterParseElement highlighterParseElement = new HighlighterParseElement();
private static final ScriptFieldsParseElement scriptFieldsParseElement = new ScriptFieldsParseElement();
private static final FieldDataFieldsParseElement fieldDataFieldsParseElement = new FieldDataFieldsParseElement();
public InnerHitsSubSearchContext parse(XContentParser parser) throws IOException {
public static InnerHitsSubSearchContext parse(XContentParser parser) throws IOException {
String fieldName = null;
XContentParser.Token token;
String innerHitName = null;
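The helper no longer carries injected parse elements; everything is static, which is why QueryShardContext.getInnerHitsContext(...) in the hunks above can simply forward to it. A tiny sketch of the resulting call (the wrapper class is illustrative; the static parse method is what this commit introduces):

import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;

final class InnerHitsParsingExample {
    static InnerHitsSubSearchContext parseInnerHits(XContentParser parser) throws IOException {
        // Static call, no injection or INSTANCE lookup required.
        return InnerHitsQueryParserHelper.parse(parser);
    }
}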
@@ -61,8 +61,8 @@ public class NestedInnerQueryParseSupport {
private ObjectMapper parentObjectMapper;
public NestedInnerQueryParseSupport(XContentParser parser, SearchContext searchContext) {
parseContext = searchContext.queryParserService().getShardContext().parseContext();
shardContext = searchContext.queryParserService().getShardContext();
shardContext = searchContext.indexShard().getQueryShardContext();
parseContext = shardContext.parseContext();
shardContext.reset(parser);
}

@@ -214,9 +214,9 @@ public class MatchQuery {
if (fieldType != null) {
return context.getSearchAnalyzer(fieldType);
}
return context.mapperService().searchAnalyzer();
return context.getMapperService().searchAnalyzer();
} else {
Analyzer analyzer = context.mapperService().analysisService().analyzer(this.analyzer);
Analyzer analyzer = context.getMapperService().analysisService().analyzer(this.analyzer);
if (analyzer == null) {
throw new IllegalArgumentException("No analyzer found for [" + this.analyzer + "]");
}
@@ -27,6 +27,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.ThreadInterruptedException;
import org.elasticsearch.ElasticsearchException;

@@ -81,6 +82,7 @@ import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.percolator.PercolateStats;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;

@@ -152,6 +154,7 @@ public class IndexShard extends AbstractIndexShardComponent {
private final IndicesQueryCache indicesQueryCache;
private final IndexEventListener indexEventListener;
private final IndexSettings idxSettings;
private final IndexServicesProvider provider;
private TimeValue refreshInterval;

@@ -252,13 +255,14 @@ public class IndexShard extends AbstractIndexShardComponent {
this.flushThresholdSize = this.indexSettings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB));
this.disableFlush = this.indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false);
this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId);
this.provider = provider;
this.searcherWrapper = indexSearcherWrapper;
this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, provider.getQueryParserService(), indexingService, mapperService, indexFieldDataService);
this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, indexingService, mapperService, newQueryShardContext(), indexFieldDataService);
if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) {
percolatorQueriesRegistry.enableRealTimePercolator();
}
// We start up inactive
active.set(false);
}

@@ -762,7 +766,7 @@ public class IndexShard extends AbstractIndexShardComponent {
engine.flushAndClose();
}
} finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times
IOUtils.close(engine, percolatorQueriesRegistry);
IOUtils.close(engine, percolatorQueriesRegistry, queryShardContextCache);
}
}
}

@@ -1598,4 +1602,23 @@ public class IndexShard extends AbstractIndexShardComponent {
}
}

private CloseableThreadLocal<QueryShardContext> queryShardContextCache = new CloseableThreadLocal<QueryShardContext>() {
// TODO We should get rid of this threadlocal but I think it should be a sep change
@Override
protected QueryShardContext initialValue() {
return newQueryShardContext();
}
};

private QueryShardContext newQueryShardContext() {
return new QueryShardContext(idxSettings, provider.getClient(), indexCache.bitsetFilterCache(), indexFieldDataService, mapperService, similarityService, provider.getScriptService(), provider.getIndicesQueriesRegistry());
}

/**
 * Returns a threadlocal QueryShardContext for this shard.
 */
public QueryShardContext getQueryShardContext() {
return queryShardContextCache.get();
}

}
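IndexShard now keeps the per-thread QueryShardContext in a CloseableThreadLocal, mirroring the cache the removed service maintained; consumers fetch it, reset it against a parser, and parse. A hedged sketch of that consumer pattern (getQueryShardContext() and parseInnerFilter(...) are introduced by this commit; the wrapper method is illustrative):

import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;

final class ShardFilterParsingExample {
    static ParsedQuery parseFilter(IndexShard shard, XContentParser parser) throws IOException {
        // One cached context per shard and thread; no IndexQueryParserService lookup.
        QueryShardContext context = shard.getQueryShardContext();
        return context.parseInnerFilter(parser);
    }
}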
@@ -28,12 +28,16 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;

@@ -50,7 +54,7 @@ import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.indexing.IndexingStats;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats;

@@ -60,6 +64,7 @@ import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.PluginsService;

@@ -72,6 +77,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Stream;

import static java.util.Collections.emptyMap;

@@ -93,6 +99,10 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
private final IndicesWarmer indicesWarmer;
private final IndicesQueryCache indicesQueryCache;
private final AnalysisRegistry analysisRegistry;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final ClusterService clusterService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap();
public AnalysisRegistry getAnalysis() {

@@ -127,7 +137,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
}
@Inject
public IndicesService(Settings settings, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer, AnalysisRegistry analysisRegistry) {
public IndicesService(Settings settings, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService) {
super(settings);
this.injector = injector;
this.pluginsService = pluginsService;

@@ -137,6 +147,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
this.indexStoreConfig = new IndexStoreConfig(settings);
this.analysisRegistry = analysisRegistry;
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
nodeSettingsService.addListener(indexStoreConfig);
}

@@ -282,6 +295,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
}

/**
 * Creates a new {@link IndexService} for the given metadata.
 * @param indexMetaData the index metadata to create the index for

@@ -292,8 +306,9 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
if (!lifecycle.started()) {
throw new IllegalStateException("Can't create an index [" + indexMetaData.getIndex() + "], node is closed");
}
final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST);
final String indexName = indexMetaData.getIndex();
final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state());
final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST, indexNameMatcher);
Index index = new Index(indexMetaData.getIndex());
if (indices.containsKey(index.name())) {
throw new IndexAlreadyExistsException(index);
@@ -384,9 +399,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
logger.debug("[{}] closing mapper service (reason [{}])", index, reason);
indexInjector.getInstance(MapperService.class).close();
logger.debug("[{}] closing index query parser service (reason [{}])", index, reason);
indexInjector.getInstance(IndexQueryParserService.class).close();
logger.debug("[{}] closed... (reason [{}])", index, reason);
listener.afterIndexClosed(indexService.index(), indexService.getIndexSettings().getSettings());
if (delete) {

@@ -786,4 +798,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
return deleteList.size();
}
}

public QueryParseContext newQueryParserContext() {
return new QueryParseContext(indicesQueriesRegistry);
}
}
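Node-level parse contexts now come from IndicesService rather than from an index's query parser service; the SearchService hunks further down use exactly this. A hedged sketch of that usage (newQueryParserContext() is added by this commit, reset(...) and parseFieldMatcher(...) are shown in the SearchService diff; the wrapper method is illustrative):

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesService;

final class NodeLevelParsingExample {
    static QueryParseContext newContext(IndicesService indicesService, XContentParser parser, ParseFieldMatcher matcher) {
        // The registry lives on IndicesService now, so no index-level service is needed.
        QueryParseContext parseContext = indicesService.newQueryParserContext();
        parseContext.reset(parser);
        parseContext.parseFieldMatcher(matcher);
        return parseContext;
    }
}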
@@ -51,7 +51,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -423,11 +422,6 @@ public class PercolateContext extends SearchContext {
return indexService.analysisService();
}
@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
@Override
public SimilarityService similarityService() {
return indexService.similarityService();

@@ -74,6 +74,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.percolator.QueryCollector.Count;

@@ -190,7 +191,7 @@ public class PercolatorService extends AbstractComponent {
indexShard.shardId().index().name(),
request.indices()
);
Query aliasFilter = percolateIndexService.aliasFilter(filteringAliases);
Query aliasFilter = percolateIndexService.aliasFilter(indexShard.getQueryShardContext(), filteringAliases);
SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
final PercolateContext context = new PercolateContext(

@@ -198,7 +199,7 @@ public class PercolatorService extends AbstractComponent {
);
SearchContext.setCurrent(context);
try {
ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context, request.shardId().getIndex());
ParsedDocument parsedDocument = parseRequest(indexShard, request, context, request.shardId().getIndex());
if (context.percolateQueries().isEmpty()) {
return new PercolateShardResponse(context, request.shardId());
}

@@ -258,7 +259,7 @@ public class PercolatorService extends AbstractComponent {
}
}
private ParsedDocument parseRequest(IndexService documentIndexService, PercolateShardRequest request, PercolateContext context, String index) {
private ParsedDocument parseRequest(IndexShard shard, PercolateShardRequest request, PercolateContext context, String index) {
BytesReference source = request.source();
if (source == null || source.length() == 0) {
return null;

@@ -276,6 +277,7 @@ public class PercolatorService extends AbstractComponent {
// not the in memory percolate doc
String[] previousTypes = context.types();
context.types(new String[]{TYPE_NAME});
QueryShardContext queryShardContext = shard.getQueryShardContext();
try {
parser = XContentFactory.xContent(source).createParser(source);
String currentFieldName = null;

@@ -290,7 +292,7 @@ public class PercolatorService extends AbstractComponent {
throw new ElasticsearchParseException("Either specify doc or get, not both");
}
MapperService mapperService = documentIndexService.mapperService();
MapperService mapperService = shard.mapperService();
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
doc = docMapper.getDocumentMapper().parse(source(parser).index(index).type(request.documentType()).flyweight(true));
if (docMapper.getMapping() != null) {

@@ -312,12 +314,12 @@ public class PercolatorService extends AbstractComponent {
if (context.percolateQuery() != null) {
throw new ElasticsearchParseException("Either specify query or filter, not both");
}
context.percolateQuery(documentIndexService.queryParserService().parse(parser).query());
context.percolateQuery(queryShardContext.parse(parser).query());
} else if ("filter".equals(currentFieldName)) {
if (context.percolateQuery() != null) {
throw new ElasticsearchParseException("Either specify query or filter, not both");
}
Query filter = documentIndexService.queryParserService().parseInnerFilter(parser).query();
Query filter = queryShardContext.parseInnerFilter(parser).query();
context.percolateQuery(new ConstantScoreQuery(filter));
} else if ("sort".equals(currentFieldName)) {
parseSort(parser, context);
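The percolator hunks above lean on the parse entry points that moved onto QueryShardContext. A hedged sketch of the raw-source variant for completeness (parse(BytesReference) and ParsedQuery.query() appear in this commit; the helper method is illustrative):

import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;

final class RawSourceParsingExample {
    static Query parseRawQuery(IndexShard shard, BytesReference querySource) {
        // Parse raw query bytes through the shard's context and unwrap the Lucene query.
        QueryShardContext context = shard.getQueryShardContext();
        return context.parse(querySource).query();
    }
}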
@ -61,6 +61,7 @@ import org.elasticsearch.index.mapper.MappedFieldType.Loading;
|
|||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.search.stats.ShardSearchStats;
|
||||
import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
|
@ -559,7 +560,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, context);
|
||||
BytesReference run = (BytesReference) executable.run();
|
||||
try (XContentParser parser = XContentFactory.xContent(run).createParser(run)) {
|
||||
QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry());
|
||||
QueryParseContext queryParseContext = indicesService.newQueryParserContext();
|
||||
queryParseContext.reset(parser);
|
||||
queryParseContext.parseFieldMatcher(parseFieldMatcher);
|
||||
parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext));
|
||||
|
@ -659,7 +660,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
if (source == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
final IndexShard indexShard = context.indexShard();
|
||||
QueryShardContext queryShardContext = indexShard.getQueryShardContext();
|
||||
context.from(source.from());
|
||||
context.size(source.size());
|
||||
ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
|
||||
|
@ -670,10 +672,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
}
|
||||
}
|
||||
if (source.query() != null) {
|
||||
context.parsedQuery(context.queryParserService().toQuery(source.query()));
|
||||
context.parsedQuery(queryShardContext.toQuery(source.query()));
|
||||
}
|
||||
if (source.postFilter() != null) {
|
||||
context.parsedPostFilter(context.queryParserService().toQuery(source.postFilter()));
|
||||
context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
|
||||
}
|
||||
if (source.sorts() != null) {
|
||||
XContentParser completeSortParser = null;
|
||||
|
@ -1178,8 +1180,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
try {
|
||||
long now = System.nanoTime();
|
||||
final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name());
|
||||
QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry());
|
||||
queryParseContext.parseFieldMatcher(indexService.queryParserService().parseFieldMatcher());
|
||||
QueryParseContext queryParseContext = indicesService.newQueryParserContext();
|
||||
queryParseContext.parseFieldMatcher(indexService.getIndexSettings().getParseFieldMatcher());
|
||||
ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexShard.getIndexSettings()
|
||||
.getNumberOfShards(),
|
||||
SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache());
|
||||
|
|
|
@ -40,7 +40,7 @@ public class FilterParser implements Aggregator.Parser {
|
|||
|
||||
@Override
|
||||
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
|
||||
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser);
|
||||
ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
|
||||
|
||||
return new FilterAggregator.Factory(aggregationName, filter == null ? new MatchAllDocsQuery() : filter.query());
|
||||
}
|
||||
|
|
|
@ -82,7 +82,7 @@ public class FiltersParser implements Aggregator.Parser {
|
|||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
key = parser.currentName();
|
||||
} else {
|
||||
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser);
|
||||
ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
|
||||
filters.add(new FiltersAggregator.KeyedFilter(key, filter == null ? Queries.newMatchAllQuery() : filter.query()));
|
||||
}
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ public class FiltersParser implements Aggregator.Parser {
|
|||
keyed = false;
|
||||
int idx = 0;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
ParsedQuery filter = context.queryParserService().parseInnerFilter(parser);
|
||||
ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
|
||||
filters.add(new FiltersAggregator.KeyedFilter(String.valueOf(idx), filter == null ? Queries.newMatchAllQuery()
|
||||
: filter.query()));
|
||||
idx++;
|
||||
|
|
|
@ -66,7 +66,7 @@ public class SignificantTermsParametersParser extends AbstractTermsParametersPar
|
|||
if (significanceHeuristicParser != null) {
|
||||
significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher(), context);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
|
||||
filter = context.queryParserService().parseInnerFilter(parser).query();
|
||||
filter = context.indexShard().getQueryShardContext().parseInnerFilter(parser).query();
|
||||
} else {
|
||||
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
|
||||
+ currentFieldName + "].", parser.getTokenLocation());
|
||||
|
|
|
@ -59,7 +59,7 @@ public class InnerHitsParseElement implements SearchParseElement {
|
|||
|
||||
@Override
|
||||
public void parse(XContentParser parser, SearchContext searchContext) throws Exception {
|
||||
QueryShardContext context = searchContext.queryParserService().getShardContext();
|
||||
QueryShardContext context = searchContext.indexShard().getQueryShardContext();
|
||||
context.reset(parser);
|
||||
Map<String, InnerHitsContext.BaseInnerHits> innerHitsMap = parseInnerHits(parser, context, searchContext);
|
||||
if (innerHitsMap != null) {
|
||||
|
@ -149,7 +149,7 @@ public class InnerHitsParseElement implements SearchParseElement {
|
|||
if (documentMapper == null) {
|
||||
throw new IllegalArgumentException("type [" + type + "] doesn't exist");
|
||||
}
|
||||
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.mapperService(), documentMapper);
|
||||
return new InnerHitsContext.ParentChildInnerHits(parseResult.context(), parseResult.query(), parseResult.childInnerHits(), context.getMapperService(), documentMapper);
|
||||
}
|
||||
|
||||
private InnerHitsContext.NestedInnerHits parseNested(XContentParser parser, QueryShardContext context, SearchContext searchContext, String nestedPath) throws Exception {
|
||||
|
@ -178,7 +178,7 @@ public class InnerHitsParseElement implements SearchParseElement {
|
|||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if ("query".equals(fieldName)) {
|
||||
Query q = searchContext.queryParserService().parseInnerQuery(context);
|
||||
Query q = context.parseInnerQuery();
|
||||
query = new ParsedQuery(q, context.copyNamedQueries());
|
||||
} else if ("inner_hits".equals(fieldName)) {
|
||||
childInnerHits = parseInnerHits(parser, context, searchContext);
|
||||
|
|
|
@@ -22,7 +22,7 @@ package org.elasticsearch.search.highlight;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;

@@ -66,13 +66,13 @@ public class HighlighterParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
try {
context.highlight(parse(parser, context.queryParserService()));
context.highlight(parse(parser, context.indexShard().getQueryShardContext()));
} catch (IllegalArgumentException ex) {
throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation());
}
}

public SearchContextHighlight parse(XContentParser parser, IndexQueryParserService queryParserService) throws IOException {
public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
XContentParser.Token token;
String topLevelFieldName = null;
final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>();

@@ -111,7 +111,7 @@ public class HighlighterParseElement implements SearchParseElement {
}
highlightFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryParserService)));
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
}
}
} else {

@@ -167,11 +167,11 @@ public class HighlighterParseElement implements SearchParseElement {
if (token == XContentParser.Token.FIELD_NAME) {
highlightFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryParserService)));
fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
}
}
} else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) {
globalOptionsBuilder.highlightQuery(queryParserService.parse(parser).query());
globalOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
}
}
}

@@ -189,7 +189,7 @@ public class HighlighterParseElement implements SearchParseElement {
return new SearchContextHighlight(fields);
}

protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, IndexQueryParserService queryParserService) throws IOException {
protected SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
XContentParser.Token token;

final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();

@@ -252,7 +252,7 @@ public class HighlighterParseElement implements SearchParseElement {
}
} else if (token == XContentParser.Token.START_OBJECT) {
if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) {
fieldOptionsBuilder.highlightQuery(queryParserService.parse(parser).query());
fieldOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
} else if ("options".equals(fieldName)) {
fieldOptionsBuilder.options(parser.map());
}

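A minimal sketch (not from this commit) of how a caller now feeds the highlighter parser; only the methods visible in the hunks above are assumed, and the wrapper class and method names are made up for illustration:

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.query.QueryShardContext;
    import org.elasticsearch.search.highlight.HighlighterParseElement;
    import org.elasticsearch.search.internal.SearchContext;

    final class HighlightParsing {
        // Resolve the shard-scoped context once and hand it to the new parse overload.
        static void parseHighlight(HighlighterParseElement element, XContentParser parser, SearchContext context) throws Exception {
            QueryShardContext shardContext = context.indexShard().getQueryShardContext();
            context.highlight(element.parse(parser, shardContext));
        }
    }
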
@@ -35,14 +35,12 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.*;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -193,7 +191,7 @@ public class DefaultSearchContext extends SearchContext {
}

// initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasFilter(request.filteringAliases());
aliasFilter = indexService.aliasFilter(indexShard.getQueryShardContext(), request.filteringAliases());

if (query() == null) {
parsedQuery(ParsedQuery.parsedMatchAllQuery());

@@ -430,11 +428,6 @@ public class DefaultSearchContext extends SearchContext {
return indexService.analysisService();
}

@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}

@Override
public SimilarityService similarityService() {
return indexService.similarityService();

@@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -257,11 +256,6 @@ public abstract class FilteredSearchContext extends SearchContext {
return in.analysisService();
}

@Override
public IndexQueryParserService queryParserService() {
return in.queryParserService();
}

@Override
public SimilarityService similarityService() {
return in.similarityService();

@@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;

@@ -206,8 +205,6 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple

public abstract AnalysisService analysisService();

public abstract IndexQueryParserService queryParserService();

public abstract SimilarityService similarityService();

public abstract ScriptService scriptService();

@@ -33,7 +33,7 @@ public class FilterBinaryParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] filterSource = parser.binaryValue();
try (XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedQuery filter = context.queryParserService().parseInnerFilter(fSourceParser);
ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(fSourceParser);
if (filter != null) {
context.parsedPostFilter(filter);
}

@@ -30,7 +30,7 @@ public class PostFilterParseElement implements SearchParseElement {

@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
ParsedQuery postFilter = context.queryParserService().parseInnerFilter(parser);
ParsedQuery postFilter = context.indexShard().getQueryShardContext().parseInnerFilter(parser);
if (postFilter != null) {
context.parsedPostFilter(postFilter);
}

@@ -33,7 +33,7 @@ public class QueryBinaryParseElement implements SearchParseElement {
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] querySource = parser.binaryValue();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
context.parsedQuery(context.queryParserService().parse(qSourceParser));
context.parsedQuery(context.indexShard().getQueryShardContext().parse(qSourceParser));
}
}
}

@@ -30,6 +30,6 @@ public class QueryParseElement implements SearchParseElement {

@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
context.parsedQuery(context.queryParserService().parse(parser));
context.parsedQuery(context.indexShard().getQueryShardContext().parse(parser));
}
}

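The filter and query parse elements above all converge on the same pattern: SearchContext no longer exposes a queryParserService(), so parsing goes through the shard that owns the search. A minimal sketch of that pattern with an invented element name; only the calls shown in the hunks are assumed:

    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.index.query.QueryShardContext;
    import org.elasticsearch.search.SearchParseElement;
    import org.elasticsearch.search.internal.SearchContext;

    public class ExampleQueryParseElement implements SearchParseElement {
        @Override
        public void parse(XContentParser parser, SearchContext context) throws Exception {
            // The QueryShardContext is owned by the shard, not by a per-index service.
            QueryShardContext shardContext = context.indexShard().getQueryShardContext();
            context.parsedQuery(shardContext.parse(parser));
        }
    }
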
@@ -183,7 +183,7 @@ public final class QueryRescorer implements Rescorer {
private static final ObjectParser<QueryRescoreContext, SearchContext> RESCORE_PARSER = new ObjectParser<>("query", null);

static {
RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.queryParserService().parse(p), new ParseField("rescore_query"));
RESCORE_PARSER.declareObject(QueryRescoreContext::setParsedQuery, (p, c) -> c.indexShard().getQueryShardContext().parse(p), new ParseField("rescore_query"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setQueryWeight, new ParseField("query_weight"));
RESCORE_PARSER.declareFloat(QueryRescoreContext::setRescoreQueryWeight, new ParseField("rescore_query_weight"));
RESCORE_PARSER.declareString(QueryRescoreContext::setScoreMode, new ParseField("score_mode"));

@@ -72,7 +72,7 @@ public class GeoDistanceSortParser implements SortParser {
MultiValueMode sortMode = null;
NestedInnerQueryParseSupport nestedHelper = null;

final boolean indexCreatedBeforeV2_0 = context.queryParserService().getIndexCreatedVersion().before(Version.V_2_0_0);
final boolean indexCreatedBeforeV2_0 = context.indexShard().getIndexSettings().getIndexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;

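As this hunk shows, index-created-version checks now read from IndexSettings rather than from the removed service. A small hypothetical helper built only from the calls above (the class and method names are invented for illustration):

    import org.elasticsearch.Version;
    import org.elasticsearch.search.internal.SearchContext;

    final class IndexVersionChecks {
        // IndexSettings, reachable through the shard, is the source of truth for the created version.
        static boolean createdBefore2x(SearchContext context) {
            return context.indexShard().getIndexSettings().getIndexVersionCreated().before(Version.V_2_0_0);
        }
    }
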
@@ -21,12 +21,10 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;

import java.io.IOException;

public interface SuggestContextParser {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException;
SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException;

}

@@ -23,7 +23,6 @@ import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;

@@ -45,13 +44,13 @@ public final class SuggestParseElement implements SearchParseElement {

@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(),
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(),
context.shardTarget().index(), context.shardTarget().shardId(), context);
context.suggest(suggestionSearchContext);
}

public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, String index, int shardId, HasContextAndHeaders headersContext) throws IOException {
String index, int shardId, HasContextAndHeaders headersContext) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();

BytesRef globalText = null;

@@ -90,7 +89,7 @@ public final class SuggestParseElement implements SearchParseElement {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
}
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
suggestionContext = contextParser.parse(parser, mapperService, queryParserService, headersContext);
suggestionContext = contextParser.parse(parser, mapperService, headersContext);
}
}
if (suggestionContext != null) {

@@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;

@@ -43,13 +44,13 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
}

@Inject
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService) {
this(addBuildIns(suggesters, scriptService));
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
this(addBuildIns(suggesters, scriptService, indexServices));
}

private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService) {
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
final Map<String, Suggester> map = new HashMap<>();
map.put("phrase", new PhraseSuggester(scriptService));
map.put("phrase", new PhraseSuggester(scriptService, indexServices));
map.put("term", new TermSuggester());
map.put("completion", new CompletionSuggester());
map.putAll(suggesters);

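Since PhraseSuggester now needs IndicesService to reach a shard's QueryShardContext, anything that builds the registry by hand has to pass that service through. A hedged sketch of such wiring, using only the constructor shown above (the helper class is invented, and both services would normally be injected by Guice):

    import java.util.Collections;
    import org.elasticsearch.indices.IndicesService;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.search.suggest.Suggesters;

    final class SuggestersWiring {
        // Both services come from the node's injector in practice, as the @Inject constructor above shows.
        static Suggesters newSuggesters(ScriptService scriptService, IndicesService indicesService) {
            return new Suggesters(Collections.emptyMap(), scriptService, indicesService);
        }
    }
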
@@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest.completion;

import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -26,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.context.ContextMapping.ContextQuery;

@@ -48,9 +48,9 @@ public class CompletionSuggestParser implements SuggestContextParser {
}

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token;
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
String fieldName = null;
CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester);

@@ -60,7 +60,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (!parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) {
if (!parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if (token == XContentParser.Token.VALUE_BOOLEAN && "fuzzy".equals(fieldName)) {
suggestion.setFuzzy(parser.booleanValue());
}

@@ -73,7 +73,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fuzzyConfigName = parser.currentName();
} else if (token.isValue()) {
if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, Fuzziness.FIELD)) {
if (parseFieldMatcher.match(fuzzyConfigName, Fuzziness.FIELD)) {
suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance());
} else if ("transpositions".equals(fuzzyConfigName)) {
suggestion.setFuzzyTranspositions(parser.booleanValue());

@@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.Template;

@@ -50,9 +49,9 @@ public final class PhraseSuggestParser implements SuggestContextParser {

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
HasContextAndHeaders headersContext) throws IOException {
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
suggestion.setQueryParserService(queryParserService);
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
boolean gramSizeSet = false;

@@ -60,7 +59,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
suggestion.setRealWordErrorLikelihood(parser.floatValue());
if (suggestion.realworldErrorLikelyhood() <= 0.0) {

@@ -106,7 +105,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
fieldName = parser.currentName();
}
if (token.isValue()) {
parseCandidateGenerator(parser, mapperService, fieldName, generator, queryParserService.parseFieldMatcher());
parseCandidateGenerator(parser, mapperService, fieldName, generator, parseFieldMatcher);
}
}
verifyGenerator(generator);

@@ -141,7 +140,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (suggestion.getCollateQueryScript() != null) {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
Template template = Template.parse(parser, queryParserService.parseFieldMatcher());
Template template = Template.parse(parser, parseFieldMatcher);
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
headersContext);
suggestion.setCollateQueryScript(compiledScript);

@@ -29,12 +29,13 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;

@@ -55,10 +56,11 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
private final BytesRef SEPARATOR = new BytesRef(" ");
private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
private final ScriptService scriptService;
private final IndicesService indicesService;

@Inject
public PhraseSuggester(ScriptService scriptService) {
public PhraseSuggester(ScriptService scriptService, IndicesService indicesService) {
this.scriptService = scriptService;
this.indicesService = indicesService;
}

/*

@@ -117,7 +119,9 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
final ExecutableScript executable = scriptService.executable(collateScript, vars);
final BytesReference querySource = (BytesReference) executable.run();
final ParsedQuery parsedQuery = suggestion.getQueryParserService().parse(querySource);
IndexService indexService = indicesService.indexService(suggestion.getIndex());
IndexShard shard = indexService.getShard(suggestion.getShard());
final ParsedQuery parsedQuery = shard.getQueryShardContext().parse(querySource);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
}
if (!collateMatch && !collatePrune) {

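The collate hunk above is why IndicesService is threaded into PhraseSuggester: the suggestion context only carries index and shard coordinates, and the collate query is parsed by the owning shard's QueryShardContext. A minimal hypothetical helper built from just those calls (class and method names are invented):

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.index.query.ParsedQuery;
    import org.elasticsearch.index.shard.IndexShard;
    import org.elasticsearch.indices.IndicesService;

    final class CollateQueryParsing {
        // index and shardId are the coordinates the suggestion context carries (suggestion.getIndex()/getShard() above).
        static ParsedQuery parseOnShard(IndicesService indicesService, String index, int shardId, BytesReference querySource) {
            IndexShard shard = indicesService.indexService(index).getShard(shardId);
            return shard.getQueryShardContext().parse(querySource);
        }
    }
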
@@ -25,7 +25,6 @@ import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.Suggester;

@@ -33,7 +32,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContex

class PhraseSuggestionContext extends SuggestionContext {
private final BytesRef SEPARATOR = new BytesRef(" ");
private IndexQueryParserService queryParserService;
private float maxErrors = 0.5f;
private BytesRef separator = SEPARATOR;
private float realworldErrorLikelihood = 0.95f;

@@ -112,14 +110,6 @@ class PhraseSuggestionContext extends SuggestionContext {
return scorer;
}

public void setQueryParserService(IndexQueryParserService queryParserService) {
this.queryParserService = queryParserService;
}

public IndexQueryParserService getQueryParserService() {
return queryParserService;
}

static class DirectCandidateGenerator extends DirectSpellcheckerSettings {
private Analyzer preFilter;
private Analyzer postFilter;

@@ -22,7 +22,6 @@ import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;

@@ -40,7 +39,7 @@ public final class TermSuggestParser implements SuggestContextParser {

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token;
String fieldName = null;
TermSuggestionContext suggestion = new TermSuggestionContext(suggester);

@@ -49,7 +48,7 @@ public final class TermSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
parseTokenValue(parser, mapperService, fieldName, suggestion, settings, queryParserService.parseFieldMatcher());
parseTokenValue(parser, mapperService, fieldName, suggestion, settings, mapperService.getIndexSettings().getParseFieldMatcher());
} else {
throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]");
}

@@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.InvalidAliasNameException;
import org.elasticsearch.test.ESSingleNodeTestCase;

@@ -73,6 +74,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase {

public void testFilteringAliases() throws Exception {
IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);
add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));
add(indexService, "all", null);

@@ -81,41 +83,44 @@ public class IndexServiceTests extends ESSingleNodeTestCase {
assertThat(indexService.getMetaData().getAliases().containsKey("dogs"), equalTo(true));
assertThat(indexService.getMetaData().getAliases().containsKey("turtles"), equalTo(false));

assertThat(indexService.aliasFilter("cats").toString(), equalTo("animal:cat"));
assertThat(indexService.aliasFilter("cats", "dogs").toString(), equalTo("animal:cat animal:dog"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats").toString(), equalTo("animal:cat"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "dogs").toString(), equalTo("animal:cat animal:dog"));

// Non-filtering alias should turn off all filters because filters are ORed
assertThat(indexService.aliasFilter("all"), nullValue());
assertThat(indexService.aliasFilter("cats", "all"), nullValue());
assertThat(indexService.aliasFilter("all", "cats"), nullValue());
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all"), nullValue());
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "cats", "all"), nullValue());
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "all", "cats"), nullValue());

add(indexService, "cats", filter(termQuery("animal", "feline")));
add(indexService, "dogs", filter(termQuery("animal", "canine")));
assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
}

public void testAliasFilters() throws Exception {
IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);

add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));

assertThat(indexService.aliasFilter(), nullValue());
assertThat(indexService.aliasFilter("dogs").toString(), equalTo("animal:dog"));
assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:dog animal:cat"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext()), nullValue());
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs").toString(), equalTo("animal:dog"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:dog animal:cat"));

add(indexService, "cats", filter(termQuery("animal", "feline")));
add(indexService, "dogs", filter(termQuery("animal", "canine")));

assertThat(indexService.aliasFilter("dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
assertThat(indexService.aliasFilter(shard.getQueryShardContext(), "dogs", "cats").toString(), equalTo("animal:canine animal:feline"));
}

public void testRemovedAliasFilter() throws Exception {
IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);

add(indexService, "cats", filter(termQuery("animal", "cat")));
remove(indexService, "cats");
try {
indexService.aliasFilter("cats");
indexService.aliasFilter(shard.getQueryShardContext(), "cats");
fail("Expected InvalidAliasNameException");
} catch (InvalidAliasNameException e) {
assertThat(e.getMessage(), containsString("Invalid alias name [cats]"));

@@ -124,11 +129,13 @@ public class IndexServiceTests extends ESSingleNodeTestCase {

public void testUnknownAliasFilter() throws Exception {
IndexService indexService = newIndexService();
IndexShard shard = indexService.getShard(0);

add(indexService, "cats", filter(termQuery("animal", "cat")));
add(indexService, "dogs", filter(termQuery("animal", "dog")));

try {
indexService.aliasFilter("unknown");
indexService.aliasFilter(shard.getQueryShardContext(), "unknown");
fail();
} catch (InvalidAliasNameException e) {
// all is well

@@ -66,6 +66,7 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper;

@@ -75,6 +76,7 @@ import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.*;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.internal.SearchContext;

@@ -122,10 +124,17 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
private static final int NUMBER_OF_TESTQUERIES = 20;

private static Injector injector;
private static IndexQueryParserService queryParserService;
private static IndicesQueriesRegistry indicesQueriesRegistry;
private static QueryShardContext queryShardContext;
private static IndexFieldDataService indexFieldDataService;

protected static IndexQueryParserService queryParserService() {
return queryParserService;
protected static QueryShardContext queryShardContext() {
return queryShardContext;
}

protected static IndexFieldDataService indexFieldDataService() {
return indexFieldDataService;
}

private static Index index;

@@ -233,9 +242,13 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}
}
).createInjector();
queryParserService = injector.getInstance(IndexQueryParserService.class);

MapperService mapperService = queryParserService.mapperService;
SimilarityService similarityService = injector.getInstance(SimilarityService.class);
indexFieldDataService = injector.getInstance(IndexFieldDataService.class);
ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
queryShardContext = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
//create some random type with some default field, those types will stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
for (int i = 0; i < currentTypes.length; i++) {

@@ -264,10 +277,12 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
terminate(injector.getInstance(ThreadPool.class));
injector = null;
index = null;
queryParserService = null;
queryShardContext = null;
currentTypes = null;
namedWriteableRegistry = null;
randomTypes = null;
indicesQueriesRegistry = null;
indexFieldDataService = null;
}

@Before

@@ -542,7 +557,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
}

private QueryParser<?> queryParser(String queryId) {
return queryParserService.indicesQueriesRegistry().queryParsers().get(queryId);
return indicesQueriesRegistry.queryParsers().get(queryId);
}

//we use the streaming infra to create a copy of the query provided as argument

@@ -562,7 +577,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* @return a new {@link QueryShardContext} based on the base test index and queryParserService
*/
protected static QueryShardContext createShardContext() {
QueryShardContext queryCreationContext = new QueryShardContext(queryParserService);
QueryShardContext queryCreationContext = queryShardContext.clone();
queryCreationContext.reset();
queryCreationContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
return queryCreationContext;

@@ -572,7 +587,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
* @return a new {@link QueryParseContext} based on the base test index and queryParserService
*/
protected static QueryParseContext createParseContext() {
QueryParseContext queryParseContext = new QueryParseContext(queryParserService.indicesQueriesRegistry());
QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
queryParseContext.reset(null);
queryParseContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
return queryParseContext;

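With the factory service gone, the test base class assembles a QueryShardContext from its dependencies directly and hands out clones per test. A hedged fragment of that setup, using only the calls visible in the hunk above; every variable is one of the injector-provided dependencies from that hunk, and "proxy" is whatever client stand-in the test class defines elsewhere:

    // Assemble once during static setup, as the hunk above does.
    queryShardContext = new QueryShardContext(idxSettings, proxy, bitsetFilterCache,
            indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);

    // Hand out a fresh, strictly-parsing copy per test, as createShardContext() does.
    QueryShardContext perTestContext = queryShardContext.clone();
    perTestContext.reset();
    perTestContext.parseFieldMatcher(ParseFieldMatcher.STRICT);
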
@@ -54,7 +54,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue

public void setUp() throws Exception {
super.setUp();
MapperService mapperService = queryParserService().mapperService;
MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer",

@@ -75,8 +75,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
}

protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService;
final IndexFieldDataService fieldData = queryParserService().fieldDataService;
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context;

@@ -49,7 +49,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ

public void setUp() throws Exception {
super.setUp();
MapperService mapperService = queryParserService().mapperService;
MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer",

@@ -70,8 +70,8 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
}

protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService;
final IndexFieldDataService fieldData = queryParserService().fieldDataService;
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context;

@@ -43,7 +43,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
@Override
public void setUp() throws Exception {
super.setUp();
MapperService mapperService = queryParserService().mapperService;
MapperService mapperService = queryShardContext().getMapperService();
mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc",
STRING_FIELD_NAME, "type=string",
INT_FIELD_NAME, "type=integer",

@@ -57,8 +57,8 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu

@Override
protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService;
final IndexFieldDataService fieldData = queryParserService().fieldDataService;
final MapperService mapperService = queryShardContext().getMapperService();
final IndexFieldDataService fieldData = indexFieldDataService();
TestSearchContext testSearchContext = new TestSearchContext() {
private InnerHitsContext context;

@@ -58,7 +58,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
// Create timestamp option only then we have a date mapper,
// otherwise we could trigger exception.
if (createShardContext().mapperService().smartNameFieldType(DATE_FIELD_NAME) != null) {
if (createShardContext().getMapperService().smartNameFieldType(DATE_FIELD_NAME) != null) {
if (randomBoolean()) {
query.timeZone(randomTimeZone());
}

@@ -267,7 +267,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ

// the remaining tests requires either a mapping that we register with types in base test setup
// no strict field resolution (version before V_1_4_0_Beta1)
if (getCurrentTypes().length > 0 || shardContext.indexQueryParserService().getIndexCreatedVersion().before(Version.V_1_4_0_Beta1)) {
if (getCurrentTypes().length > 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) {
Query luceneQuery = queryBuilder.toQuery(shardContext);
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
TermQuery termQuery = (TermQuery) ((BooleanQuery) luceneQuery).clauses().get(0).getQuery();

@@ -44,13 +44,17 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.threadpool.ThreadPool;

@@ -121,9 +125,13 @@ public class TemplateQueryParserTests extends ESTestCase {
}
}
).createInjector();

IndexQueryParserService queryParserService = injector.getInstance(IndexQueryParserService.class);
context = new QueryShardContext(queryParserService);
SimilarityService similarityService = injector.getInstance(SimilarityService.class);
IndexFieldDataService indexFieldDataService = injector.getInstance(IndexFieldDataService.class);
ScriptService scriptService = injector.getInstance(ScriptService.class);
MapperService mapperService = injector.getInstance(MapperService.class);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new IndicesWarmer(idxSettings.getNodeSettings(), null));
IndicesQueriesRegistry indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class);
context = new QueryShardContext(idxSettings, proxy, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry);
}

@Override

@@ -56,7 +56,7 @@ public class WrapperQueryBuilderTests extends AbstractQueryTestCase<WrapperQuery
@Override
protected void doAssertLuceneQuery(WrapperQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) {
final QueryShardContext contextCopy = new QueryShardContext(context.indexQueryParserService());
final QueryShardContext contextCopy = context.clone();
contextCopy.reset(qSourceParser);
QueryBuilder<?> innerQuery = contextCopy.parseContext().parseInnerQueryBuilder();
Query expected = innerQuery.toQuery(context);

@@ -24,7 +24,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;

@@ -66,15 +66,14 @@ public class CustomQueryParserIT extends ESIntegTestCase {
assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryParserPlugin.DummyQueryBuilder())).get(), 1l);
}

private static IndexQueryParserService queryParser() {
private static QueryShardContext queryShardContext() {
IndicesService indicesService = internalCluster().getDataNodeInstance(IndicesService.class);
return indicesService.indexServiceSafe("index").queryParserService();
return indicesService.indexServiceSafe("index").getQueryShardContext();
}

//see #11120
public void testConstantScoreParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser();
Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
Query inner = ((ConstantScoreQuery) q).getQuery();
assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class));
assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter);

@@ -82,13 +81,12 @@ public class CustomQueryParserIT extends ESIntegTestCase {

//see #11120
public void testBooleanParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser();
// single clause, serialized as inner object
Query q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
assertThat(q, instanceOf(BooleanQuery.class));
BooleanQuery bq = (BooleanQuery) q;
assertEquals(4, bq.clauses().size());

@@ -113,7 +111,7 @@ public class CustomQueryParserIT extends ESIntegTestCase {
.should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder())
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryShardContext());
assertThat(q, instanceOf(BooleanQuery.class));
bq = (BooleanQuery) q;
assertEquals(8, bq.clauses().size());

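The integration test now reaches the index-level QueryShardContext directly from IndicesService and builds Lucene queries against it. A minimal sketch of that lookup, using only calls that appear in the hunk above; the helper class and the term query are arbitrary illustration:

    import java.io.IOException;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.index.query.QueryShardContext;
    import org.elasticsearch.indices.IndicesService;

    final class ShardContextLookup {
        // indicesService comes from the node under test, e.g. internalCluster().getDataNodeInstance(IndicesService.class).
        static Query toLuceneQuery(IndicesService indicesService, String indexName) throws IOException {
            QueryShardContext context = indicesService.indexServiceSafe(indexName).getQueryShardContext();
            return QueryBuilders.termQuery("field", "value").toQuery(context);
        }
    }
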
@@ -22,11 +22,9 @@ package org.elasticsearch.index.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESSingleNodeTestCase;

@@ -39,7 +37,6 @@ import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;

public class MultiMatchQueryTests extends ESSingleNodeTestCase {

private IndexQueryParserService queryParser;
private IndexService indexService;

@Before

@@ -64,11 +61,10 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
"}";
mapperService.merge("person", new CompressedXContent(mapping), true, false);
this.indexService = indexService;
queryParser = indexService.queryParserService();
}

public void testCrossFieldMultiMatchQuery() throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(queryParser);
QueryShardContext queryShardContext = indexService.getShard(0).getQueryShardContext();
queryShardContext.setAllowUnmappedFields(true);
Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) {

@@ -1015,8 +1015,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardRouting routing = new ShardRouting(shard.routingEntry());
shard.close("simon says", true);
IndexServicesProvider indexServices = indexService.getIndexServices();
IndexServicesProvider newProvider = new IndexServicesProvider(indexServices.getIndexEventListener(), indexServices.getThreadPool(), indexServices.getMapperService(), indexServices.getQueryParserService(), indexServices.getIndexCache(), indexServices.getIndicesQueryCache(), indexServices.getCodecService(), indexServices.getTermVectorsService(), indexServices.getIndexFieldDataService(), indexServices.getWarmer(), indexServices.getSimilarityService(), indexServices.getFactory(), indexServices.getBigArrays(), indexServices.getIndexingMemoryController());
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), wrapper, newProvider);
IndexShard newShard = new IndexShard(shard.shardId(), indexService.getIndexSettings(), shard.shardPath(), shard.store(), wrapper, indexServices);
ShardRoutingHelper.reinit(routing);
newShard.updateRoutingEntry(routing, false);
DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);

@@ -20,11 +20,7 @@ package org.elasticsearch.search.suggest;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;

import java.io.IOException;
import java.util.Locale;

@@ -58,15 +54,11 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions

@Override
public SuggestContextParser getContextParser() {
return new SuggestContextParser() {
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
Map<String, Object> options = parser.map();
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
suggestionContext.setField((String) options.get("field"));
return suggestionContext;
}
return (parser, mapperService, headersContext) -> {
Map<String, Object> options = parser.map();
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
suggestionContext.setField((String) options.get("field"));
return suggestionContext;
};
}

@@ -41,7 +41,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;

@@ -311,14 +310,7 @@ public class TestSearchContext extends SearchContext {
}

@Override
public AnalysisService analysisService() {
return indexService.analysisService();
}

@Override
public IndexQueryParserService queryParserService() {
return indexService.queryParserService();
}
public AnalysisService analysisService() { return indexService.analysisService();}

@Override
public SimilarityService similarityService() {