Merge branch 'master' into feature/query-refactoring

Conflicts:
	core/src/main/java/org/elasticsearch/index/query/ConstantScoreQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/NotQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/ScriptQueryParser.java
javanna 2015-07-02 15:29:48 +02:00 committed by Luca Cavanna
commit 63530631f9
100 changed files with 615 additions and 614 deletions


@ -168,7 +168,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
DefaultSearchContext searchContext = new DefaultSearchContext(0,
new ShardSearchLocalRequest(request.types(), request.nowInMillis(), request.filteringAliases()),
null, searcher, indexService, indexShard,
scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter()
scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher
);
SearchContext.setCurrent(searchContext);
try {
@ -187,10 +187,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
} catch (QueryParsingException e) {
valid = false;
error = e.getDetailedMessage();
} catch (AssertionError e) {
valid = false;
error = e.getMessage();
} catch (IOException e) {
} catch (AssertionError|IOException e) {
valid = false;
error = e.getMessage();
} finally {

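The two identical handlers above collapse into a single Java 7 multi-catch, where the caught variable is typed as the common supertype of the listed throwables. A minimal standalone sketch (helper and values are made up for illustration):

import java.io.IOException;

public class MultiCatchDemo {
    static void mightFail(boolean io) throws IOException {
        if (io) {
            throw new IOException("io failure");
        }
        throw new AssertionError("assertion failure");
    }

    public static void main(String[] args) {
        for (boolean io : new boolean[]{true, false}) {
            boolean valid = true;
            String error = null;
            try {
                mightFail(io);
            } catch (AssertionError | IOException e) { // one handler for two unrelated throwables
                valid = false;
                error = e.getMessage();
            }
            System.out.println("valid=" + valid + ", error=" + error);
        }
    }
}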

@ -151,7 +151,7 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques
SearchContext context = new DefaultSearchContext(0,
new ShardSearchLocalRequest(request.types(), request.nowInMillis(), request.filteringAliases()),
shardTarget, indexShard.acquireSearcher("exists"), indexService, indexShard,
scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter());
scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher);
SearchContext.setCurrent(context);
try {


@ -113,7 +113,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
0, new ShardSearchLocalRequest(new String[]{request.type()}, request.nowInMillis, request.filteringAlias()),
null, result.searcher(), indexService, indexShard,
scriptService, pageCacheRecycler,
bigArrays, threadPool.estimatedTimeInMillisCounter()
bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher
);
SearchContext.setCurrent(context);


@ -26,6 +26,7 @@ import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -239,7 +240,7 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
* "query_then_fetch"/"queryThenFetch", and "query_and_fetch"/"queryAndFetch".
*/
public SearchRequest searchType(String searchType) {
return searchType(SearchType.fromString(searchType));
return searchType(SearchType.fromString(searchType, ParseFieldMatcher.EMPTY));
}
/**

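On the request side there are no node settings to consult, which is why the string form now resolves through ParseFieldMatcher.EMPTY, i.e. leniently. A hedged usage sketch (the index name is hypothetical):

import org.elasticsearch.action.search.SearchRequest;

public class SearchTypeOnRequestDemo {
    public static void main(String[] args) {
        SearchRequest request = new SearchRequest("my_index"); // hypothetical index name
        // Resolved via SearchType.fromString(searchType, ParseFieldMatcher.EMPTY):
        // no settings are available here, so deprecated spellings cannot be rejected.
        request.searchType("dfs_query_then_fetch");
        System.out.println(request.searchType()); // DFS_QUERY_THEN_FETCH
    }
}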

@ -20,6 +20,7 @@
package org.elasticsearch.action.search;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
/**
* Search type represents the manner in which the search operation is executed.
@ -108,7 +109,7 @@ public enum SearchType {
* one of "dfs_query_then_fetch"/"dfsQueryThenFetch", "dfs_query_and_fetch"/"dfsQueryAndFetch",
* "query_then_fetch"/"queryThenFetch", "query_and_fetch"/"queryAndFetch", and "scan".
*/
public static SearchType fromString(String searchType) {
public static SearchType fromString(String searchType, ParseFieldMatcher parseFieldMatcher) {
if (searchType == null) {
return SearchType.DEFAULT;
}
@ -122,7 +123,7 @@ public enum SearchType {
return SearchType.QUERY_AND_FETCH;
} else if ("scan".equals(searchType)) {
return SearchType.SCAN;
} else if (COUNT_VALUE.match(searchType)) {
} else if (parseFieldMatcher.match(searchType, COUNT_VALUE)) {
return SearchType.COUNT;
} else {
throw new IllegalArgumentException("No search type for [" + searchType + "]");

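A hedged sketch of the new signature in use. The plain names resolve regardless of the matcher; "count" now goes through the COUNT_VALUE ParseField, so a STRICT matcher can reject that spelling once it is deprecated:

import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.ParseFieldMatcher;

public class SearchTypeFromStringDemo {
    public static void main(String[] args) {
        System.out.println(SearchType.fromString("query_then_fetch", ParseFieldMatcher.EMPTY)); // QUERY_THEN_FETCH
        System.out.println(SearchType.fromString("count", ParseFieldMatcher.EMPTY));            // COUNT, accepted leniently
    }
}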

@ -20,6 +20,7 @@
package org.elasticsearch.action.support;
import org.elasticsearch.action.*;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
@ -37,9 +38,11 @@ public abstract class TransportAction<Request extends ActionRequest, Response ex
protected final ThreadPool threadPool;
protected final String actionName;
private final ActionFilter[] filters;
protected final ParseFieldMatcher parseFieldMatcher;
protected TransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters) {
super(settings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
this.actionName = actionName;
this.filters = actionFilters.filters();
this.threadPool = threadPool;


@ -25,6 +25,7 @@ import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
@ -291,13 +292,13 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
public UpdateRequest addScriptParam(String name, Object value) {
Script script = script();
if (script == null) {
HashMap<String, Object> scriptParams = new HashMap<String, Object>();
HashMap<String, Object> scriptParams = new HashMap<>();
scriptParams.put(name, value);
updateOrCreateScript(null, null, null, scriptParams);
} else {
Map<String, Object> scriptParams = script.getParams();
if (scriptParams == null) {
scriptParams = new HashMap<String, Object>();
scriptParams = new HashMap<>();
scriptParams.put(name, value);
updateOrCreateScript(null, null, null, scriptParams);
} else {
@ -648,7 +649,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if ("script".equals(currentFieldName) && token == XContentParser.Token.START_OBJECT) {
script = Script.parse(parser);
// here we don't have settings available, so we are unable to throw strict deprecation exceptions
script = Script.parse(parser, ParseFieldMatcher.EMPTY);
} else if ("params".equals(currentFieldName)) {
scriptParams = parser.map();
} else if ("scripted_upsert".equals(currentFieldName)) {
@ -666,7 +668,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
} else if ("detect_noop".equals(currentFieldName)) {
detectNoop(parser.booleanValue());
} else {
scriptParameterParser.token(currentFieldName, token, parser);
// here we don't have settings available, so we are unable to throw deprecation exceptions
scriptParameterParser.token(currentFieldName, token, parser, ParseFieldMatcher.EMPTY);
}
}
// Don't have a script using the new API so see if it is specified with the old API

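A hedged sketch of the lenient script parsing UpdateRequest now performs explicitly; the JSON body and the inline-script field layout are assumptions about the 2.x script syntax:

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;

public class LenientScriptParseDemo {
    public static void main(String[] args) throws Exception {
        BytesArray json = new BytesArray("{\"inline\": \"ctx._source.counter += 1\"}"); // assumed field layout
        try (XContentParser parser = XContentHelper.createParser(json)) {
            parser.nextToken(); // position on START_OBJECT, as the request parser does
            Script script = Script.parse(parser, ParseFieldMatcher.EMPTY); // lenient: no settings available
            System.out.println(script);
        }
    }
}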

@ -23,6 +23,7 @@ import java.util.EnumSet;
import java.util.HashSet;
/**
* Holds a field that can be found in a request while parsing, together with its different variants, which may be deprecated.
*/
public class ParseField {
private final String camelCaseName;
@ -30,9 +31,10 @@ public class ParseField {
private final String[] deprecatedNames;
private String allReplacedWith = null;
public static final EnumSet<Flag> EMPTY_FLAGS = EnumSet.noneOf(Flag.class);
static final EnumSet<Flag> EMPTY_FLAGS = EnumSet.noneOf(Flag.class);
static final EnumSet<Flag> STRICT_FLAGS = EnumSet.of(Flag.STRICT);
public static enum Flag {
enum Flag {
STRICT
}
@ -47,7 +49,7 @@ public class ParseField {
set.add(Strings.toCamelCase(depName));
set.add(Strings.toUnderscoreCase(depName));
}
this.deprecatedNames = set.toArray(new String[0]);
this.deprecatedNames = set.toArray(new String[set.size()]);
}
}
@ -78,11 +80,7 @@ public class ParseField {
return parseField;
}
public boolean match(String currentFieldName) {
return match(currentFieldName, EMPTY_FLAGS);
}
public boolean match(String currentFieldName, EnumSet<Flag> flags) {
boolean match(String currentFieldName, EnumSet<Flag> flags) {
if (allReplacedWith == null && (currentFieldName.equals(camelCaseName) || currentFieldName.equals(underscoreName))) {
return true;
}


@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.IndexQueryParserService;
import java.util.EnumSet;
/**
* Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField}
* against a field name and throws a deprecation exception depending on the current value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
*/
public class ParseFieldMatcher {
public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS);
public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS);
private final EnumSet<ParseField.Flag> parseFlags;
public ParseFieldMatcher(Settings settings) {
if (settings.getAsBoolean(IndexQueryParserService.PARSE_STRICT, false)) {
this.parseFlags = EnumSet.of(ParseField.Flag.STRICT);
} else {
this.parseFlags = ParseField.EMPTY_FLAGS;
}
}
public ParseFieldMatcher(EnumSet<ParseField.Flag> parseFlags) {
this.parseFlags = parseFlags;
}
/**
* Matches a {@link ParseField} against a field name, and throws a deprecation exception depending on the current
* value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
* @param fieldName the field name found in the request while parsing
* @param parseField the parse field that we are looking for
* @throws IllegalArgumentException whenever we are in strict mode and the request contained a deprecated field
* @return true whenever the parse field that we are looking for was found, false otherwise
*/
public boolean match(String fieldName, ParseField parseField) {
return parseField.match(fieldName, parseFlags);
}
}

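A hedged demo of the new indirection: the matcher, not the ParseField itself, now decides how deprecated spellings are treated (the field names below are made up for illustration):

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;

public class ParseFieldMatcherDemo {
    public static void main(String[] args) {
        ParseField field = new ParseField("fuzziness", "old_fuzziness"); // hypothetical deprecated alias

        System.out.println(ParseFieldMatcher.EMPTY.match("fuzziness", field));     // true
        System.out.println(ParseFieldMatcher.EMPTY.match("old_fuzziness", field)); // true, deprecated but tolerated

        try {
            ParseFieldMatcher.STRICT.match("old_fuzziness", field); // strict mode rejects deprecated names
        } catch (IllegalArgumentException e) {
            System.out.println("strict: " + e.getMessage());
        }
    }
}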

@ -21,9 +21,9 @@ package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.collect.Tuple;
@ -38,35 +38,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.ByteFieldMapper;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.Murmur3FieldMapper;
import org.elasticsearch.index.mapper.core.ShortFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SizeFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
@ -96,6 +71,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
private final Object typeParsersMutex = new Object();
private final Version indexVersionCreated;
private final ParseFieldMatcher parseFieldMatcher;
private volatile ImmutableMap<String, Mapper.TypeParser> typeParsers;
private volatile ImmutableMap<String, Mapper.TypeParser> rootTypeParsers;
@ -103,6 +79,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
public DocumentMapperParser(Index index, @IndexSettings Settings indexSettings, MapperService mapperService, AnalysisService analysisService,
SimilarityLookupService similarityLookupService, ScriptService scriptService) {
super(index, indexSettings);
this.parseFieldMatcher = new ParseFieldMatcher(indexSettings);
this.mapperService = mapperService;
this.analysisService = analysisService;
this.similarityLookupService = similarityLookupService;
@ -168,7 +145,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
}
public Mapper.TypeParser.ParserContext parserContext() {
return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, mapperService, typeParsers, indexVersionCreated);
return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, mapperService, typeParsers, indexVersionCreated, parseFieldMatcher);
}
public DocumentMapper parse(String source) throws MapperParsingException {
@ -296,7 +273,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
}
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig, Version indexVersionCreated) {
Script script = Script.parse(transformConfig, true);
Script script = Script.parse(transformConfig, true, parseFieldMatcher);
if (script != null) {
docBuilder.transform(scriptService, script);
}


@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
@ -92,14 +93,17 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {
private final Version indexVersionCreated;
private final ParseFieldMatcher parseFieldMatcher;
public ParserContext(AnalysisService analysisService, SimilarityLookupService similarityLookupService,
MapperService mapperService,
ImmutableMap<String, TypeParser> typeParsers, Version indexVersionCreated) {
MapperService mapperService, ImmutableMap<String, TypeParser> typeParsers,
Version indexVersionCreated, ParseFieldMatcher parseFieldMatcher) {
this.analysisService = analysisService;
this.similarityLookupService = similarityLookupService;
this.mapperService = mapperService;
this.typeParsers = typeParsers;
this.indexVersionCreated = indexVersionCreated;
this.parseFieldMatcher = parseFieldMatcher;
}
public AnalysisService analysisService() {
@ -121,6 +125,10 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {
public Version indexVersionCreated() {
return indexVersionCreated;
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
}
Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;


@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
@ -35,7 +33,6 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser;
@ -98,7 +95,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
if (parserContext.indexVersionCreated().before(Version.V_2_0_0) &&
(COMPRESS.match(fieldName) || COMPRESS_THRESHOLD.match(fieldName))) {
(parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
iterator.remove();
}
}


@ -175,19 +175,19 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
iterator.remove();
} else if (Fields.SEARCH_ANALYZER.match(fieldName)) {
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.SEARCH_ANALYZER)) {
searchAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
iterator.remove();
} else if (fieldName.equals(Fields.PAYLOADS)) {
builder.payloads(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_SEPARATORS.match(fieldName)) {
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_SEPARATORS)) {
builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.PRESERVE_POSITION_INCREMENTS.match(fieldName)) {
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_POSITION_INCREMENTS)) {
builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
} else if (Fields.MAX_INPUT_LENGTH.match(fieldName)) {
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.MAX_INPUT_LENGTH)) {
builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {


@ -59,7 +59,7 @@ public class ConstantScoreQueryParser extends BaseQueryParser {
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (INNER_QUERY_FIELD.match(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
query = parseContext.parseInnerFilterToQueryBuilder();
queryFound = true;
} else {


@ -84,7 +84,7 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
value = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (FUZZINESS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {
fuzziness = Fuzziness.parse(parser);
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
prefixLength = parser.intValue();


@ -96,7 +96,7 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
// type may not have been extracted yet, so use the
// XContentStructure.<type> facade to parse if available,
// or delay parsing if not.
if (QUERY_FIELD.match(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iq = new XContentStructure.InnerQuery(parseContext, childType == null ? null : new String[] { childType });
queryFound = true;
} else if ("inner_hits".equals(currentFieldName)) {


@ -85,7 +85,7 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
// type may not have been extracted yet, so use the
// XContentStructure.<type> facade to parse if available,
// or delay parsing if not.
if (QUERY_FIELD.match(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iq = new XContentStructure.InnerQuery(parseContext, parentType == null ? null : new String[] {parentType});
queryFound = true;
} else if ("inner_hits".equals(currentFieldName)) {


@ -23,7 +23,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
@ -45,11 +45,7 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptService;
import java.io.IOException;
import java.util.EnumSet;
/**
*
*/
public class IndexQueryParserService extends AbstractIndexComponent {
public static final String DEFAULT_FIELD = "index.query.default_field";
@ -82,7 +78,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
private String defaultField;
private boolean queryStringLenient;
private final boolean strict;
private final ParseFieldMatcher parseFieldMatcher;
private final boolean defaultAllowUnmappedFields;
@Inject
@ -103,7 +99,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
this.defaultField = indexSettings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
this.queryStringLenient = indexSettings.getAsBoolean(QUERY_STRING_LENIENT, false);
this.strict = indexSettings.getAsBoolean(PARSE_STRICT, false);
this.parseFieldMatcher = new ParseFieldMatcher(indexSettings);
this.defaultAllowUnmappedFields = indexSettings.getAsBoolean(ALLOW_UNMAPPED, true);
this.indicesQueriesRegistry = indicesQueriesRegistry;
}
@ -240,9 +236,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
@Nullable
public Query parseInnerQuery(QueryParseContext parseContext) throws IOException {
if (strict) {
parseContext.parseFlags(EnumSet.of(ParseField.Flag.STRICT));
}
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
if (query == null) {
query = Queries.newMatchNoDocsQuery();
@ -301,9 +295,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
parseContext.reset(parser);
try {
if (strict) {
parseContext.parseFlags(EnumSet.of(ParseField.Flag.STRICT));
}
parseContext.parseFieldMatcher(parseFieldMatcher);
Query query = parseContext.parseInnerQuery();
if (query == null) {
query = Queries.newMatchNoDocsQuery();
@ -313,4 +305,8 @@ public class IndexQueryParserService extends AbstractIndexComponent {
parseContext.reset(null);
}
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
}

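A hedged sketch of how the removed boolean strict flag maps onto the new matcher, assuming the 2.x Settings.settingsBuilder() entry point:

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.IndexQueryParserService;

public class StrictMatcherFromSettingsDemo {
    public static void main(String[] args) {
        Settings indexSettings = Settings.settingsBuilder() // assumed builder API for this era
                .put(IndexQueryParserService.PARSE_STRICT, true)
                .build();
        // Behaves like ParseFieldMatcher.STRICT when the setting is enabled.
        ParseFieldMatcher matcher = new ParseFieldMatcher(indexSettings);
        System.out.println(matcher.match("some_field", new ParseField("some_field"))); // true
    }
}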

@ -73,10 +73,10 @@ public class IndicesQueryParser extends BaseQueryParserTemp {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (QUERY_FIELD.match(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
innerQuery = new XContentStructure.InnerQuery(parseContext, null);
queryFound = true;
} else if (NO_MATCH_QUERY.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, null);
} else {
throw new QueryParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
@ -106,7 +106,7 @@ public class IndicesQueryParser extends BaseQueryParserTemp {
}
indicesFound = true;
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text());
} else if (NO_MATCH_QUERY.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
String type = parser.text();
if ("all".equals(type)) {
noMatchQuery = Queries.newMatchAllQuery();


@ -102,7 +102,7 @@ public class MatchQueryParser extends BaseQueryParserTemp {
boost = parser.floatValue();
} else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
matchQuery.setPhraseSlop(parser.intValue());
} else if (Fuzziness.FIELD.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
matchQuery.setFuzziness(Fuzziness.parse(parser));
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
matchQuery.setFuzzyPrefixLength(parser.intValue());


@ -115,47 +115,47 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (Fields.LIKE_TEXT.match(currentFieldName, parseContext.parseFlags())) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.LIKE_TEXT)) {
likeTexts.add(parser.text());
} else if (Fields.LIKE.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.LIKE)) {
parseLikeField(parser, likeTexts, likeItems);
} else if (Fields.UNLIKE.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.UNLIKE)) {
parseLikeField(parser, unlikeTexts, unlikeItems);
} else if (Fields.MIN_TERM_FREQ.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MIN_TERM_FREQ)) {
mltQuery.setMinTermFrequency(parser.intValue());
} else if (Fields.MAX_QUERY_TERMS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MAX_QUERY_TERMS)) {
mltQuery.setMaxQueryTerms(parser.intValue());
} else if (Fields.MIN_DOC_FREQ.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MIN_DOC_FREQ)) {
mltQuery.setMinDocFreq(parser.intValue());
} else if (Fields.MAX_DOC_FREQ.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MAX_DOC_FREQ)) {
mltQuery.setMaxDocFreq(parser.intValue());
} else if (Fields.MIN_WORD_LENGTH.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MIN_WORD_LENGTH)) {
mltQuery.setMinWordLen(parser.intValue());
} else if (Fields.MAX_WORD_LENGTH.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MAX_WORD_LENGTH)) {
mltQuery.setMaxWordLen(parser.intValue());
} else if (Fields.BOOST_TERMS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.BOOST_TERMS)) {
float boostFactor = parser.floatValue();
if (boostFactor != 0) {
mltQuery.setBoostTerms(true);
mltQuery.setBoostTermsFactor(boostFactor);
}
} else if (Fields.MINIMUM_SHOULD_MATCH.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.MINIMUM_SHOULD_MATCH)) {
mltQuery.setMinimumShouldMatch(parser.text());
} else if ("analyzer".equals(currentFieldName)) {
analyzer = parseContext.analysisService().analyzer(parser.text());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(parser.floatValue());
} else if (Fields.FAIL_ON_UNSUPPORTED_FIELD.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.FAIL_ON_UNSUPPORTED_FIELD)) {
failOnUnsupportedField = parser.booleanValue();
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if (Fields.INCLUDE.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.INCLUDE)) {
include = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (Fields.STOP_WORDS.match(currentFieldName, parseContext.parseFlags())) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.STOP_WORDS)) {
Set<String> stopWords = Sets.newHashSet();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
stopWords.add(parser.text());
@ -168,25 +168,25 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
MappedFieldType fieldType = parseContext.fieldMapper(field);
moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName());
}
} else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.DOCUMENT_IDS)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (!token.isValue()) {
throw new IllegalArgumentException("ids array element should only contain ids");
}
likeItems.add(newTermVectorsRequest().id(parser.text()));
}
} else if (Fields.DOCUMENTS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.DOCUMENTS)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("docs array element should include an object");
}
likeItems.add(parseDocument(parser));
}
} else if (Fields.LIKE.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.LIKE)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parser, likeTexts, likeItems);
}
} else if (Fields.UNLIKE.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.UNLIKE)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parser, unlikeTexts, unlikeItems);
}
@ -194,10 +194,10 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (Fields.LIKE.match(currentFieldName, parseContext.parseFlags())) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.LIKE)) {
parseLikeField(parser, likeTexts, likeItems);
}
else if (Fields.UNLIKE.match(currentFieldName, parseContext.parseFlags())) {
else if (parseContext.parseFieldMatcher().match(currentFieldName, Fields.UNLIKE)) {
parseLikeField(parser, unlikeTexts, unlikeItems);
} else {
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");


@ -24,13 +24,13 @@ import com.google.common.collect.Lists;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.search.MatchQuery;
import java.io.IOException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
@ -133,15 +133,11 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
return parseField;
}
public static Type parse(String value) {
return parse(value, ParseField.EMPTY_FLAGS);
}
public static Type parse(String value, EnumSet<ParseField.Flag> flags) {
public static Type parse(String value, ParseFieldMatcher parseFieldMatcher) {
MultiMatchQueryBuilder.Type[] values = MultiMatchQueryBuilder.Type.values();
Type type = null;
for (MultiMatchQueryBuilder.Type t : values) {
if (t.parseField().match(value, flags)) {
if (parseFieldMatcher.match(value, t.parseField())) {
type = t;
break;
}
@ -194,7 +190,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
* Sets the type of the text query.
*/
public MultiMatchQueryBuilder type(Object type) {
this.type = type == null ? null : Type.parse(type.toString().toLowerCase(Locale.ROOT));
this.type = type == null ? null : Type.parse(type.toString().toLowerCase(Locale.ROOT), ParseFieldMatcher.EMPTY);
return this;
}

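A hedged sketch of the reworked Type.parse signature ("best_fields" is assumed to be the canonical name of BEST_FIELDS):

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;

public class MultiMatchTypeParseDemo {
    public static void main(String[] args) {
        // Canonical names resolve under any matcher; deprecated aliases of a
        // type name would only pass a lenient one.
        MultiMatchQueryBuilder.Type type =
                MultiMatchQueryBuilder.Type.parse("best_fields", ParseFieldMatcher.STRICT);
        System.out.println(type); // BEST_FIELDS
    }
}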

@ -81,7 +81,7 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
if ("query".equals(currentFieldName)) {
value = parser.objectText();
} else if ("type".equals(currentFieldName)) {
type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFlags());
type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFieldMatcher());
} else if ("analyzer".equals(currentFieldName)) {
String analyzer = parser.text();
if (parseContext.analysisService().analyzer(analyzer) == null) {
@ -92,7 +92,7 @@ public class MultiMatchQueryParser extends BaseQueryParserTemp {
boost = parser.floatValue();
} else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
multiMatchQuery.setPhraseSlop(parser.intValue());
} else if (Fuzziness.FIELD.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
multiMatchQuery.setFuzziness(Fuzziness.parse(parser));
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
multiMatchQuery.setFuzzyPrefixLength(parser.intValue());


@ -70,7 +70,7 @@ public class NestedQueryParser extends BaseQueryParserTemp {
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
builder.query();
} else if (FILTER_FIELD.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
builder.filter();
} else if ("inner_hits".equals(currentFieldName)) {
builder.setInnerHits(innerHitsQueryParserHelper.parse(parseContext));


@ -58,7 +58,7 @@ public class NotQueryParser extends BaseQueryParser {
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (QUERY_FIELD.match(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
query = parseContext.parseInnerFilterToQueryBuilder();
queryFound = true;
} else {


@ -33,6 +33,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
@ -51,9 +52,6 @@ import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.*;
/**
*
*/
public class QueryParseContext {
private static final ParseField CACHE = new ParseField("_cache").withAllDeprecated("Elasticsearch makes its own caching decisions");
@ -91,7 +89,7 @@ public class QueryParseContext {
private XContentParser parser;
private EnumSet<ParseField.Flag> parseFlags = ParseField.EMPTY_FLAGS;
private ParseFieldMatcher parseFieldMatcher;
private boolean allowUnmappedFields;
@ -107,17 +105,17 @@ public class QueryParseContext {
this.indexQueryParser = indexQueryParser;
}
public void parseFlags(EnumSet<ParseField.Flag> parseFlags) {
this.parseFlags = parseFlags == null ? ParseField.EMPTY_FLAGS : parseFlags;
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
this.parseFieldMatcher = parseFieldMatcher;
}
public EnumSet<ParseField.Flag> parseFlags() {
return parseFlags;
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
public void reset(XContentParser jp) {
allowUnmappedFields = indexQueryParser.defaultAllowUnmappedFields();
this.parseFlags = ParseField.EMPTY_FLAGS;
this.parseFieldMatcher = ParseFieldMatcher.EMPTY;
this.lookup = null;
this.parser = jp;
this.namedQueries.clear();
@ -428,7 +426,7 @@ public class QueryParseContext {
* Return whether the setting is deprecated.
*/
public boolean isDeprecatedSetting(String setting) {
return CACHE.match(setting) || CACHE_KEY.match(setting);
return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY);
}
public Version indexVersionCreated() {

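A hedged sketch of the parsing pattern this commit applies across the query parsers, replacing FIELD.match(currentFieldName, parseContext.parseFlags()) with a call on the context's matcher (SOME_FIELD is a placeholder ParseField):

import java.io.IOException;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;

public class MigratedParserSketch {
    private static final ParseField SOME_FIELD = new ParseField("some_field"); // placeholder

    public void parse(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (parseContext.parseFieldMatcher().match(currentFieldName, SOME_FIELD)) {
                // handle the (possibly deprecated) field under the configured leniency
            }
        }
    }
}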

@ -177,7 +177,7 @@ public class QueryStringQueryParser extends BaseQueryParserTemp {
qpSettings.fuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parser.textOrNull()));
} else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
qpSettings.phraseSlop(parser.intValue());
} else if (FUZZINESS.match(currentFieldName, parseContext.parseFlags())) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {
qpSettings.fuzzyMinSim(Fuzziness.parse(parser).asSimilarity());
} else if ("boost".equals(currentFieldName)) {
qpSettings.boost(parser.floatValue());


@ -104,7 +104,7 @@ public class RangeQueryParser extends BaseQueryParser {
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if (FIELDDATA_FIELD.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) {
// ignore
} else {
throw new QueryParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");


@ -20,23 +20,18 @@
package org.elasticsearch.index.query;
import com.google.common.base.Objects;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RandomAccessWeight;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.*;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -79,9 +74,9 @@ public class ScriptQueryParser extends BaseQueryParserTemp {
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs)
if (parseContext.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseContext.parseFieldMatcher());
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
params = parser.map();
} else {
throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
@ -91,7 +86,7 @@ public class ScriptQueryParser extends BaseQueryParserTemp {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (!scriptParameterParser.token(currentFieldName, token, parser)) {
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
}
}


@ -20,6 +20,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -73,7 +74,7 @@ public class TemplateQueryParser extends BaseQueryParserTemp {
@Nullable
public Query parse(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
Template template = parse(parser);
Template template = parse(parser, parseContext.parseFieldMatcher());
ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
BytesReference querySource = (BytesReference) executable.run();
@ -85,30 +86,30 @@ public class TemplateQueryParser extends BaseQueryParserTemp {
}
}
public static Template parse(XContentParser parser, String... parameters) throws IOException {
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
for (String parameter : parameters) {
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
}
return parse(parser, parameterMap);
return parse(parser, parameterMap, parseFieldMatcher);
}
public static Template parse(String defaultLang, XContentParser parser, String... parameters) throws IOException {
public static Template parse(String defaultLang, XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
for (String parameter : parameters) {
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
}
return Template.parse(parser, parameterMap, defaultLang);
return Template.parse(parser, parameterMap, defaultLang, parseFieldMatcher);
}
public static Template parse(XContentParser parser) throws IOException {
return parse(parser, parametersToTypes);
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
return parse(parser, parametersToTypes, parseFieldMatcher);
}
public static Template parse(XContentParser parser, Map<String, ScriptService.ScriptType> parameterMap) throws IOException {
return Template.parse(parser, parameterMap);
public static Template parse(XContentParser parser, Map<String, ScriptService.ScriptType> parameterMap, ParseFieldMatcher parseFieldMatcher) throws IOException {
return Template.parse(parser, parameterMap, parseFieldMatcher);
}
@Override


@ -37,7 +37,6 @@ import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.cache.query.terms.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;
@ -140,7 +139,7 @@ public class TermsQueryParser extends BaseQueryParserTemp {
} else if (token.isValue()) {
if (EXECUTION_KEY.equals(currentFieldName)) {
// ignore
} else if (MIN_SHOULD_MATCH_FIELD.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SHOULD_MATCH_FIELD)) {
if (minShouldMatch != null) {
throw new IllegalArgumentException("[" + currentFieldName + "] is not allowed in a filter context for the [" + TermsQueryBuilder.NAME + "] query");
}


@ -122,7 +122,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
String currentFieldName;
XContentParser.Token token;
AbstractDistanceScoreFunction scoreFunction = null;
AbstractDistanceScoreFunction scoreFunction;
String multiValueMode = "MIN";
XContentBuilder variableContent = XContentFactory.jsonBuilder();
String fieldName = null;
@ -132,7 +132,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
if (token == XContentParser.Token.START_OBJECT) {
variableContent.copyCurrentStructure(parser);
fieldName = currentFieldName;
} else if (MULTI_VALUE_MODE.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MULTI_VALUE_MODE)) {
multiValueMode = parser.text();
} else {
throw new ElasticsearchParseException("malformed score function score parameters.");


@ -74,7 +74,7 @@ public class FunctionScoreQueryParser implements QueryParser {
static {
CombineFunction[] values = CombineFunction.values();
Builder<String, CombineFunction> combineFunctionMapBuilder = ImmutableMap.<String, CombineFunction>builder();
Builder<String, CombineFunction> combineFunctionMapBuilder = ImmutableMap.builder();
for (CombineFunction combineFunction : values) {
combineFunctionMapBuilder.put(combineFunction.getName(), combineFunction);
}
@ -108,7 +108,7 @@ public class FunctionScoreQueryParser implements QueryParser {
currentFieldName = parser.currentName();
} else if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQuery();
} else if (FILTER_FIELD.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
filter = parseContext.parseInnerFilter();
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
scoreMode = parseScoreMode(parseContext, parser);
@ -217,7 +217,7 @@ public class FunctionScoreQueryParser implements QueryParser {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (WEIGHT_FIELD.match(currentFieldName)) {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
functionWeight = parser.floatValue();
} else {
if ("filter".equals(currentFieldName)) {


@ -21,6 +21,7 @@
package org.elasticsearch.index.query.functionscore.script;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction;
@ -67,15 +68,15 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs)
if (parseContext.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseContext.parseFieldMatcher());
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
vars = parser.map();
} else {
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (!scriptParameterParser.token(currentFieldName, token, parser)) {
if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
}
}


@ -22,7 +22,10 @@ package org.elasticsearch.index.snapshots.blobstore;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import org.apache.lucene.index.*;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
@ -33,6 +36,7 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
@ -98,6 +102,8 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
private boolean compress;
private final ParseFieldMatcher parseFieldMatcher;
protected static final String SNAPSHOT_PREFIX = "snapshot-";
protected static final String SNAPSHOT_INDEX_PREFIX = "index-";
@ -109,6 +115,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
@Inject
public BlobStoreIndexShardRepository(Settings settings, RepositoryName repositoryName, IndicesService indicesService, ClusterService clusterService) {
super(settings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
this.repositoryName = repositoryName.name();
this.indicesService = indicesService;
this.clusterService = clusterService;
@ -259,11 +266,11 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
* @return snapshot
* @throws IOException if an IOException occurs
*/
public static BlobStoreIndexShardSnapshot readSnapshot(InputStream stream) throws IOException {
public static BlobStoreIndexShardSnapshot readSnapshot(InputStream stream, ParseFieldMatcher parseFieldMatcher) throws IOException {
byte[] data = ByteStreams.toByteArray(stream);
try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
parser.nextToken();
return BlobStoreIndexShardSnapshot.fromXContent(parser);
return BlobStoreIndexShardSnapshot.fromXContent(parser, parseFieldMatcher);
}
}
@ -274,11 +281,11 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
* @return snapshot
* @throws IOException if an IOException occurs
* */
public static BlobStoreIndexShardSnapshots readSnapshots(InputStream stream) throws IOException {
public static BlobStoreIndexShardSnapshots readSnapshots(InputStream stream, ParseFieldMatcher parseFieldMatcher) throws IOException {
byte[] data = ByteStreams.toByteArray(stream);
try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
parser.nextToken();
return BlobStoreIndexShardSnapshots.fromXContent(parser);
return BlobStoreIndexShardSnapshots.fromXContent(parser, parseFieldMatcher);
}
}
/**
@ -351,7 +358,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
public BlobStoreIndexShardSnapshot loadSnapshot() {
BlobStoreIndexShardSnapshot snapshot;
try (InputStream stream = blobContainer.openInput(snapshotBlobName(snapshotId))) {
snapshot = readSnapshot(stream);
snapshot = readSnapshot(stream, parseFieldMatcher);
} catch (IOException ex) {
throw new IndexShardRestoreFailedException(shardId, "failed to read shard snapshot file", ex);
}
@ -475,7 +482,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
}
if (latest >= 0) {
try (InputStream stream = blobContainer.openInput(SNAPSHOT_INDEX_PREFIX + latest)) {
return new Tuple<>(readSnapshots(stream), latest);
return new Tuple<>(readSnapshots(stream, parseFieldMatcher), latest);
} catch (IOException e) {
logger.warn("failed to read index file [{}]", e, SNAPSHOT_INDEX_PREFIX + latest);
}
@ -486,7 +493,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
for (String name : blobs.keySet()) {
if (name.startsWith(SNAPSHOT_PREFIX)) {
try (InputStream stream = blobContainer.openInput(name)) {
BlobStoreIndexShardSnapshot snapshot = readSnapshot(stream);
BlobStoreIndexShardSnapshot snapshot = readSnapshot(stream, parseFieldMatcher);
snapshots.add(new SnapshotFiles(snapshot.snapshot(), snapshot.indexFiles()));
} catch (IOException e) {
logger.warn("failed to read commit point [{}]", e, name);
@ -753,7 +760,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
* The new logic for StoreFileMetaData reads the entire <tt>.si</tt> and <tt>segments.n</tt> files to strengthen the
* comparison of the files on a per-segment / per-commit level.
*/
private static final void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final FileInfo fileInfo, Store.MetadataSnapshot snapshot) throws Throwable {
private static void maybeRecalculateMetadataHash(final BlobContainer blobContainer, final FileInfo fileInfo, Store.MetadataSnapshot snapshot) throws Throwable {
final StoreFileMetaData metadata;
if (fileInfo != null && (metadata = snapshot.get(fileInfo.physicalName())) != null) {
if (metadata.hash().length > 0 && fileInfo.metadata().hash().length == 0) {

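A hedged usage sketch of the static readers, which now require a matcher; the blob name is hypothetical, following the "snapshot-" prefix defined above:

import java.io.InputStream;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot;

public class ReadSnapshotDemo {
    static BlobStoreIndexShardSnapshot read(BlobContainer blobContainer) throws Exception {
        try (InputStream stream = blobContainer.openInput("snapshot-my_snapshot")) { // hypothetical blob name
            return BlobStoreIndexShardRepository.readSnapshot(stream, ParseFieldMatcher.STRICT);
        }
    }
}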

@ -25,6 +25,7 @@ import org.apache.lucene.util.Version;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
@ -448,7 +449,7 @@ public class BlobStoreIndexShardSnapshot {
* @return shard snapshot metadata
* @throws IOException
*/
public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser) throws IOException {
public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
String snapshot = null;
long indexVersion = -1;
@ -466,24 +467,24 @@ public class BlobStoreIndexShardSnapshot {
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (token.isValue()) {
if (ParseFields.NAME.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, ParseFields.NAME)) {
snapshot = parser.text();
} else if (ParseFields.INDEX_VERSION.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ParseFields.INDEX_VERSION)) {
// The index-version is needed for backward compatibility with v 1.0
indexVersion = parser.longValue();
} else if (ParseFields.START_TIME.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ParseFields.START_TIME)) {
startTime = parser.longValue();
} else if (ParseFields.TIME.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ParseFields.TIME)) {
time = parser.longValue();
} else if (ParseFields.NUMBER_OF_FILES.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ParseFields.NUMBER_OF_FILES)) {
numberOfFiles = parser.intValue();
} else if (ParseFields.TOTAL_SIZE.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ParseFields.TOTAL_SIZE)) {
totalSize = parser.longValue();
} else {
throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (ParseFields.FILES.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES)) {
while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
indexFiles.add(FileInfo.fromXContent(parser));
}
@ -498,7 +499,7 @@ public class BlobStoreIndexShardSnapshot {
}
}
}
return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, ImmutableList.<FileInfo>copyOf(indexFiles),
return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, ImmutableList.copyOf(indexFiles),
startTime, time, numberOfFiles, totalSize);
}
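The recurring pattern in this commit: call sites that used the static ParseField.match(String) now go through an injected ParseFieldMatcher, so strict or lenient handling of deprecated field names is decided once (from node settings) rather than hard-coded at each call site. A minimal sketch of the resulting parser shape, with illustrative class and field names that are not part of the commit:

import java.io.IOException;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;

class SnapshotNameParser {
    private static final ParseField NAME = new ParseField("name");

    // The matcher, not the ParseField itself, decides whether deprecated
    // spellings of a field are accepted (lenient) or rejected (strict).
    static String parseName(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
        String name = null;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue() && parseFieldMatcher.match(currentFieldName, NAME)) {
                name = parser.text();
            }
        }
        return name;
    }
}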


@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
@ -224,7 +225,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
return builder;
}
public static BlobStoreIndexShardSnapshots fromXContent(XContentParser parser) throws IOException {
public static BlobStoreIndexShardSnapshots fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
XContentParser.Token token = parser.currentToken();
Map<String, List<String>> snapshotsMap = newHashMap();
ImmutableMap.Builder<String, FileInfo> filesBuilder = ImmutableMap.builder();
@ -236,7 +237,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (token == XContentParser.Token.START_ARRAY) {
if (ParseFields.FILES.match(currentFieldName) == false) {
if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES) == false) {
throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
}
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
@ -244,7 +245,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
filesBuilder.put(fileInfo.name(), fileInfo);
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (ParseFields.SNAPSHOTS.match(currentFieldName) == false) {
if (parseFieldMatcher.match(currentFieldName, ParseFields.SNAPSHOTS) == false) {
throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -259,7 +260,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
if (ParseFields.FILES.match(currentFieldName) == false) {
if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES) == false) {
throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
}
List<String> fileNames = newArrayList();


@ -32,9 +32,7 @@ import org.apache.lucene.util.Counter;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.text.StringText;
@ -77,11 +75,7 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
/**
@ -123,7 +117,8 @@ public class PercolateContext extends SearchContext {
public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
IndexService indexService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, ScriptService scriptService, Query aliasFilter) {
BigArrays bigArrays, ScriptService scriptService, Query aliasFilter, ParseFieldMatcher parseFieldMatcher) {
super(parseFieldMatcher);
this.indexShard = indexShard;
this.indexService = indexService;
this.fieldDataService = indexService.fieldData();


@ -20,18 +20,11 @@ package org.elasticsearch.percolator;
import com.carrotsearch.hppc.IntObjectHashMap;
import com.google.common.collect.Lists;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.memory.ExtendedMemoryIndex;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchParseException;
@ -41,6 +34,7 @@ import org.elasticsearch.action.percolate.PercolateShardResponse;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
@ -64,22 +58,13 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.percolator.stats.ShardPercolateService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.percolator.QueryCollector.Count;
import org.elasticsearch.percolator.QueryCollector.Match;
import org.elasticsearch.percolator.QueryCollector.MatchAndScore;
import org.elasticsearch.percolator.QueryCollector.MatchAndSort;
import org.elasticsearch.percolator.QueryCollector.*;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchShardTarget;
@ -100,9 +85,7 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.SourceToParse.source;
import static org.elasticsearch.percolator.QueryCollector.count;
import static org.elasticsearch.percolator.QueryCollector.match;
import static org.elasticsearch.percolator.QueryCollector.matchAndScore;
import static org.elasticsearch.percolator.QueryCollector.*;
public class PercolatorService extends AbstractComponent {
@ -126,6 +109,8 @@ public class PercolatorService extends AbstractComponent {
private final CloseableThreadLocal<MemoryIndex> cache;
private final ParseFieldMatcher parseFieldMatcher;
@Inject
public PercolatorService(Settings settings, IndicesService indicesService,
PageCacheRecycler pageCacheRecycler, BigArrays bigArrays,
@ -133,6 +118,7 @@ public class PercolatorService extends AbstractComponent {
AggregationPhase aggregationPhase, ScriptService scriptService,
MappingUpdatedAction mappingUpdatedAction) {
super(settings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
this.indicesService = indicesService;
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays;
@ -186,7 +172,7 @@ public class PercolatorService extends AbstractComponent {
SearchShardTarget searchShardTarget = new SearchShardTarget(clusterService.localNode().id(), request.shardId().getIndex(), request.shardId().id());
final PercolateContext context = new PercolateContext(
request, searchShardTarget, indexShard, percolateIndexService, pageCacheRecycler, bigArrays, scriptService, aliasFilter
request, searchShardTarget, indexShard, percolateIndexService, pageCacheRecycler, bigArrays, scriptService, aliasFilter, parseFieldMatcher
);
try {
ParsedDocument parsedDocument = parseRequest(percolateIndexService, request, context);


@ -22,6 +22,7 @@ package org.elasticsearch.rest;
import org.elasticsearch.action.*;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.FilterClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
@ -39,11 +40,13 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH
private final RestController controller;
private final Client client;
protected final ParseFieldMatcher parseFieldMatcher;
protected BaseRestHandler(Settings settings, RestController controller, Client client) {
super(settings);
this.controller = controller;
this.client = client;
this.parseFieldMatcher = new ParseFieldMatcher(settings);
}
@Override
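Every REST handler now holds a protected parseFieldMatcher built once from node settings, so all subclasses parse request parameters under a single policy. A sketch of the two construction paths this commit uses, assuming only the ParseFieldMatcher API visible in this diff:

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.settings.Settings;

class MatcherWiring {
    // Settings-driven: strict or lenient according to node configuration.
    static ParseFieldMatcher forNode(Settings settings) {
        return new ParseFieldMatcher(settings);
    }

    // Flag-less fallback, used where no settings are in scope
    // (e.g. the reserved-name check in ScriptParameterParser below).
    static ParseFieldMatcher lenientDefault() {
        return ParseFieldMatcher.EMPTY;
    }
}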


@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRespo
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -57,7 +58,7 @@ public class RestClearIndicesCacheAction extends BaseRestHandler {
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(Strings.splitStringByCommaToArray(request.param("index")));
clearIndicesCacheRequest.indicesOptions(IndicesOptions.fromRequest(request, clearIndicesCacheRequest.indicesOptions()));
fromRequest(request, clearIndicesCacheRequest);
fromRequest(request, clearIndicesCacheRequest, parseFieldMatcher);
client.admin().indices().clearCache(clearIndicesCacheRequest, new RestBuilderListener<ClearIndicesCacheResponse>(channel) {
@Override
public RestResponse buildResponse(ClearIndicesCacheResponse response, XContentBuilder builder) throws Exception {
@ -69,20 +70,19 @@ public class RestClearIndicesCacheAction extends BaseRestHandler {
});
}
public static ClearIndicesCacheRequest fromRequest(final RestRequest request, ClearIndicesCacheRequest clearIndicesCacheRequest) {
public static ClearIndicesCacheRequest fromRequest(final RestRequest request, ClearIndicesCacheRequest clearIndicesCacheRequest, ParseFieldMatcher parseFieldMatcher) {
for (Map.Entry<String, String> entry : request.params().entrySet()) {
if (Fields.QUERY.match(entry.getKey())) {
if (parseFieldMatcher.match(entry.getKey(), Fields.QUERY)) {
clearIndicesCacheRequest.queryCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.queryCache()));
}
if (Fields.FIELD_DATA.match(entry.getKey())) {
if (parseFieldMatcher.match(entry.getKey(), Fields.FIELD_DATA)) {
clearIndicesCacheRequest.fieldDataCache(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.fieldDataCache()));
}
if (Fields.RECYCLER.match(entry.getKey())) {
if (parseFieldMatcher.match(entry.getKey(), Fields.RECYCLER)) {
clearIndicesCacheRequest.recycler(request.paramAsBoolean(entry.getKey(), clearIndicesCacheRequest.recycler()));
}
if (Fields.FIELDS.match(entry.getKey())) {
if (parseFieldMatcher.match(entry.getKey(), Fields.FIELDS)) {
clearIndicesCacheRequest.fields(request.paramAsStringArray(entry.getKey(), clearIndicesCacheRequest.fields()));
}
}


@ -68,7 +68,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
String templateId = request.param("id");
final Template template;
if (templateId == null) {
template = Template.parse(parser);
template = Template.parse(parser, parseFieldMatcher);
} else {
Map<String, Object> params = null;
String currentFieldName = null;
@ -79,7 +79,7 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (ScriptField.PARAMS.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptField.PARAMS)) {
if (token == XContentParser.Token.START_OBJECT) {
params = parser.map();
} else {


@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -79,11 +80,11 @@ public class RestSearchAction extends BaseRestHandler {
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
SearchRequest searchRequest;
searchRequest = RestSearchAction.parseSearchRequest(request);
searchRequest = RestSearchAction.parseSearchRequest(request, parseFieldMatcher);
client.search(searchRequest, new RestStatusToXContentListener<SearchResponse>(channel));
}
public static SearchRequest parseSearchRequest(RestRequest request) {
public static SearchRequest parseSearchRequest(RestRequest request, ParseFieldMatcher parseFieldMatcher) {
String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
SearchRequest searchRequest = new SearchRequest(indices);
// get the content, and put it in the body
@ -101,8 +102,8 @@ public class RestSearchAction extends BaseRestHandler {
// from the REST layer. these modes are an internal optimization and should
// not be specified explicitly by the user.
String searchType = request.param("search_type");
if (SearchType.fromString(searchType).equals(SearchType.QUERY_AND_FETCH) ||
SearchType.fromString(searchType).equals(SearchType.DFS_QUERY_AND_FETCH)) {
if (SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.QUERY_AND_FETCH) ||
SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.DFS_QUERY_AND_FETCH)) {
throw new IllegalArgumentException("Unsupported search type [" + searchType + "]");
} else {
searchRequest.searchType(searchType);


@ -19,6 +19,7 @@
package org.elasticsearch.script;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.Script.ScriptParseException;
@ -43,7 +44,7 @@ public abstract class AbstractScriptParser<S extends Script> {
return Collections.emptyMap();
}
public S parse(XContentParser parser) throws IOException {
public S parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
XContentParser.Token token = parser.currentToken();
// If the parser hasn't yet been pushed to the first token, do it now
@ -67,37 +68,37 @@ public abstract class AbstractScriptParser<S extends Script> {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (ScriptType.INLINE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_INLINE.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptType.INLINE.getParseField()) || parseFieldMatcher.match(currentFieldName, ScriptService.SCRIPT_INLINE)) {
type = ScriptType.INLINE;
script = parseInlineScript(parser);
} else if (ScriptType.FILE.getParseField().match(currentFieldName) || ScriptService.SCRIPT_FILE.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptType.FILE.getParseField()) || parseFieldMatcher.match(currentFieldName, ScriptService.SCRIPT_FILE)) {
type = ScriptType.FILE;
if (token == XContentParser.Token.VALUE_STRING) {
script = parser.text();
} else {
throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token);
}
} else if (ScriptType.INDEXED.getParseField().match(currentFieldName) || ScriptService.SCRIPT_ID.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptType.INDEXED.getParseField()) || parseFieldMatcher.match(currentFieldName, ScriptService.SCRIPT_ID)) {
type = ScriptType.INDEXED;
if (token == XContentParser.Token.VALUE_STRING) {
script = parser.text();
} else {
throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token);
}
} else if (ScriptField.LANG.match(currentFieldName) || ScriptService.SCRIPT_LANG.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptField.LANG) || parseFieldMatcher.match(currentFieldName, ScriptService.SCRIPT_LANG)) {
if (token == XContentParser.Token.VALUE_STRING) {
lang = parser.text();
} else {
throw new ScriptParseException("expected a string value for field [{}], but found [{}]", currentFieldName, token);
}
} else if (ScriptField.PARAMS.match(currentFieldName)) {
} else if (parseFieldMatcher.match(currentFieldName, ScriptField.PARAMS)) {
if (token == XContentParser.Token.START_OBJECT) {
params = parser.map();
} else {
throw new ScriptParseException("expected an object for field [{}], but found [{}]", currentFieldName, token);
}
} else {
// TODO remove this in 2.0
// TODO remove this in 3.0
ScriptType paramScriptType = getAdditionalScriptParameters().get(currentFieldName);
if (paramScriptType != null) {
script = parseInlineScript(parser);
@ -125,7 +126,7 @@ public abstract class AbstractScriptParser<S extends Script> {
return null;
}
public S parse(Map<String, Object> config, boolean removeMatchedEntries) {
public S parse(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
String script = null;
ScriptType type = null;
String lang = null;
@ -134,7 +135,7 @@ public abstract class AbstractScriptParser<S extends Script> {
Entry<String, Object> entry = itr.next();
String parameterName = entry.getKey();
Object parameterValue = entry.getValue();
if (ScriptField.LANG.match(parameterName) || ScriptService.SCRIPT_LANG.match(parameterName)) {
if (parseFieldMatcher.match(parameterName, ScriptField.LANG) || parseFieldMatcher.match(parameterName, ScriptService.SCRIPT_LANG)) {
if (parameterValue instanceof String || parameterValue == null) {
lang = (String) parameterValue;
if (removeMatchedEntries) {
@ -143,7 +144,7 @@ public abstract class AbstractScriptParser<S extends Script> {
} else {
throw new ScriptParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptField.PARAMS.match(parameterName)) {
} else if (parseFieldMatcher.match(parameterName, ScriptField.PARAMS)) {
if (parameterValue instanceof Map || parameterValue == null) {
params = (Map<String, Object>) parameterValue;
if (removeMatchedEntries) {
@ -152,7 +153,7 @@ public abstract class AbstractScriptParser<S extends Script> {
} else {
throw new ScriptParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptType.INLINE.getParseField().match(parameterName) || ScriptService.SCRIPT_INLINE.match(parameterName)) {
} else if (parseFieldMatcher.match(parameterName, ScriptType.INLINE.getParseField()) || parseFieldMatcher.match(parameterName, ScriptService.SCRIPT_INLINE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.INLINE;
@ -162,7 +163,7 @@ public abstract class AbstractScriptParser<S extends Script> {
} else {
throw new ScriptParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptType.FILE.getParseField().match(parameterName) || ScriptService.SCRIPT_FILE.match(parameterName)) {
} else if (parseFieldMatcher.match(parameterName, ScriptType.FILE.getParseField()) || parseFieldMatcher.match(parameterName, ScriptService.SCRIPT_FILE)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.FILE;
@ -172,7 +173,7 @@ public abstract class AbstractScriptParser<S extends Script> {
} else {
throw new ScriptParseException("Value must be of type String: [" + parameterName + "]");
}
} else if (ScriptType.INDEXED.getParseField().match(parameterName) || ScriptService.SCRIPT_ID.match(parameterName)) {
} else if (parseFieldMatcher.match(parameterName, ScriptType.INDEXED.getParseField()) || parseFieldMatcher.match(parameterName, ScriptService.SCRIPT_ID)) {
if (parameterValue instanceof String || parameterValue == null) {
script = (String) parameterValue;
type = ScriptType.INDEXED;


@ -22,6 +22,7 @@ package org.elasticsearch.script;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -204,12 +205,12 @@ public class Script implements ToXContent, Streamable {
return script;
}
public static Script parse(Map<String, Object> config, boolean removeMatchedEntries) {
return PARSER.parse(config, removeMatchedEntries);
public static Script parse(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
return PARSER.parse(config, removeMatchedEntries, parseFieldMatcher);
}
public static Script parse(XContentParser parser) throws IOException {
return PARSER.parse(parser);
public static Script parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
return PARSER.parse(parser, parseFieldMatcher);
}
@Override
@ -281,7 +282,7 @@ public class Script implements ToXContent, Streamable {
}
}
public static interface ScriptField {
public interface ScriptField {
ParseField SCRIPT = new ParseField("script");
ParseField LANG = new ParseField("lang");
ParseField PARAMS = new ParseField("params");


@ -20,19 +20,14 @@
package org.elasticsearch.script;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.script.ScriptService.ScriptType;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.*;
public class ScriptParameterParser {
@ -59,7 +54,7 @@ public class ScriptParameterParser {
fileParameters = new HashSet<>();
indexedParameters = new HashSet<>();
for (String parameterName : parameterNames) {
if (ScriptService.SCRIPT_LANG.match(parameterName)) {
if (ParseFieldMatcher.EMPTY.match(parameterName, ScriptService.SCRIPT_LANG)) {
throw new IllegalArgumentException("lang is reserved and cannot be used as a parameter name");
}
inlineParameters.add(new ParseField(parameterName));
@ -69,28 +64,28 @@ public class ScriptParameterParser {
}
}
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException {
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
if (token == XContentParser.Token.VALUE_STRING) {
if (ScriptService.SCRIPT_LANG.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, ScriptService.SCRIPT_LANG)) {
lang = parser.text();
return true;
} else {
for (ParseField parameter : inlineParameters) {
if (parameter.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, parameter)) {
String coreParameterName = parameter.getPreferredName();
putParameterValue(coreParameterName, parser.textOrNull(), ScriptType.INLINE);
return true;
}
}
for (ParseField parameter : fileParameters) {
if (parameter.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, parameter)) {
String coreParameterName = parameter.getPreferredName().replace(FILE_SUFFIX, "");
putParameterValue(coreParameterName, parser.textOrNull(), ScriptType.FILE);
return true;
}
}
for (ParseField parameter : indexedParameters) {
if (parameter.match(currentFieldName)) {
if (parseFieldMatcher.match(currentFieldName, parameter)) {
String coreParameterName = parameter.getPreferredName().replace(INDEXED_SUFFIX, "");
putParameterValue(coreParameterName, parser.textOrNull(), ScriptType.INDEXED);
return true;
@ -101,12 +96,12 @@ public class ScriptParameterParser {
return false;
}
public void parseConfig(Map<String, Object> config, boolean removeMatchedEntries) {
for (Iterator<Entry<String, Object>> itr = config.entrySet().iterator(); itr.hasNext();) {
Entry<String, Object> entry = itr.next();
public void parseConfig(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
for (Iterator<Map.Entry<String, Object>> itr = config.entrySet().iterator(); itr.hasNext();) {
Map.Entry<String, Object> entry = itr.next();
String parameterName = entry.getKey();
Object parameterValue = entry.getValue();
if (ScriptService.SCRIPT_LANG.match(parameterName)) {
if (parseFieldMatcher.match(parameterName, ScriptService.SCRIPT_LANG)) {
if (parameterValue instanceof String || parameterValue == null) {
lang = (String) parameterValue;
if (removeMatchedEntries) {
@ -117,9 +112,9 @@ public class ScriptParameterParser {
}
} else {
for (ParseField parameter : inlineParameters) {
if (parameter.match(parameterName)) {
if (parseFieldMatcher.match(parameterName, parameter)) {
String coreParameterName = parameter.getPreferredName();
String stringValue = null;
String stringValue;
if (parameterValue instanceof String) {
stringValue = (String) parameterValue;
} else {
@ -132,9 +127,9 @@ public class ScriptParameterParser {
}
}
for (ParseField parameter : fileParameters) {
if (parameter.match(parameterName)) {
if (parseFieldMatcher.match(parameterName, parameter)) {
String coreParameterName = parameter.getPreferredName().replace(FILE_SUFFIX, "");
String stringValue = null;
String stringValue;
if (parameterValue instanceof String) {
stringValue = (String) parameterValue;
} else {
@ -147,7 +142,7 @@ public class ScriptParameterParser {
}
}
for (ParseField parameter : indexedParameters) {
if (parameter.match(parameterName)) {
if (parseFieldMatcher.match(parameterName, parameter)) {
String coreParameterName = parameter.getPreferredName().replace(INDEXED_SUFFIX, "");
String stringValue = null;
if (parameterValue instanceof String) {


@ -25,7 +25,6 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteRequest;
@ -39,6 +38,7 @@ import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
@ -103,10 +103,12 @@ public class ScriptService extends AbstractComponent implements Closeable {
private final ScriptModes scriptModes;
private final ScriptContextRegistry scriptContextRegistry;
private final ParseFieldMatcher parseFieldMatcher;
private Client client = null;
/**
* @deprecated Use {@link ScriptField} instead. This should be removed in
* @deprecated Use {@link org.elasticsearch.script.Script.ScriptField} instead. This should be removed in
* 2.0
*/
public static final ParseField SCRIPT_LANG = new ParseField("lang","script_lang");
@ -130,7 +132,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
public ScriptService(Settings settings, Environment env, Set<ScriptEngineService> scriptEngines,
ResourceWatcherService resourceWatcherService, ScriptContextRegistry scriptContextRegistry) throws IOException {
super(settings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. \n" +
"Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: on` and `script.indexed: on` in elasticsearch.yml");
@ -329,7 +331,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
try {
XContentParser parser = XContentFactory.xContent(scriptBytes).createParser(scriptBytes);
parser.nextToken();
Template template = TemplateQueryParser.parse(scriptLang, parser, "params", "script", "template");
Template template = TemplateQueryParser.parse(scriptLang, parser, parseFieldMatcher, "params", "script", "template");
if (Strings.hasLength(template.getScript())) {
//Just try and compile it
//This will have the benefit of also adding the script to the cache if it compiles
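SCRIPT_LANG above is declared with a preferred name and a deprecated alternative ("lang" vs. "script_lang"). Under a flag-less matcher both spellings resolve, which is what the pre-existing static matching did; a strict matcher built from settings can reject the deprecated one. A small sketch of that behaviour, assuming lenient (empty-flag) semantics:

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;

class LangFieldDemo {
    static final ParseField SCRIPT_LANG = new ParseField("lang", "script_lang");

    public static void main(String[] args) {
        ParseFieldMatcher lenient = ParseFieldMatcher.EMPTY;
        // Both the preferred and the deprecated spelling match under lenient rules.
        System.out.println(lenient.match("lang", SCRIPT_LANG));        // true
        System.out.println(lenient.match("script_lang", SCRIPT_LANG)); // true
    }
}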


@ -20,6 +20,7 @@
package org.elasticsearch.script;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -119,23 +120,23 @@ public class Template extends Script {
}
@SuppressWarnings("unchecked")
public static Script parse(Map<String, Object> config, boolean removeMatchedEntries) {
return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries);
public static Script parse(Map<String, Object> config, boolean removeMatchedEntries, ParseFieldMatcher parseFieldMatcher) {
return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(config, removeMatchedEntries, parseFieldMatcher);
}
@SuppressWarnings("unchecked")
public static Template parse(XContentParser parser) throws IOException {
return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(parser);
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
return new TemplateParser(Collections.EMPTY_MAP, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher);
}
@Deprecated
public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames) throws IOException {
return new TemplateParser(additionalTemplateFieldNames, MustacheScriptEngineService.NAME).parse(parser);
public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames, ParseFieldMatcher parseFieldMatcher) throws IOException {
return new TemplateParser(additionalTemplateFieldNames, MustacheScriptEngineService.NAME).parse(parser, parseFieldMatcher);
}
@Deprecated
public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames, String defaultLang) throws IOException {
return new TemplateParser(additionalTemplateFieldNames, defaultLang).parse(parser);
public static Template parse(XContentParser parser, Map<String, ScriptType> additionalTemplateFieldNames, String defaultLang, ParseFieldMatcher parseFieldMatcher) throws IOException {
return new TemplateParser(additionalTemplateFieldNames, defaultLang).parse(parser, parseFieldMatcher);
}
@Override


@ -36,6 +36,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
@ -144,11 +145,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
private final ImmutableMap<String, SearchParseElement> elementParsers;
private final ParseFieldMatcher parseFieldMatcher;
@Inject
public SearchService(Settings settings, ClusterService clusterService, IndicesService indicesService,IndicesWarmer indicesWarmer, ThreadPool threadPool,
ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase,
IndicesRequestCache indicesQueryCache) {
super(settings);
this.parseFieldMatcher = new ParseFieldMatcher(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.indicesService = indicesService;
@ -582,12 +586,13 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter());
SearchContext context = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget, engineSearcher, indexService, indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher);
SearchContext.setCurrent(context);
try {
context.scroll(request.scroll());
parseTemplate(request);
parseTemplate(request, context);
parseSource(context, request.source());
parseSource(context, request.extraSource());
@ -680,7 +685,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
}
}
private void parseTemplate(ShardSearchRequest request) {
private void parseTemplate(ShardSearchRequest request, SearchContext searchContext) {
BytesReference processedQuery;
if (request.template() != null) {
@ -695,7 +700,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
try {
parser = XContentFactory.xContent(request.templateSource()).createParser(request.templateSource());
template = TemplateQueryParser.parse(parser, "params", "template");
template = TemplateQueryParser.parse(parser, searchContext.parseFieldMatcher(), "params", "template");
if (template.getType() == ScriptService.ScriptType.INLINE) {
//Try to double parse for nested template id/file
@ -714,7 +719,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
}
if (parser != null) {
try {
Template innerTemplate = TemplateQueryParser.parse(parser);
Template innerTemplate = TemplateQueryParser.parse(parser, searchContext.parseFieldMatcher());
if (hasLength(innerTemplate.getScript()) && !innerTemplate.getType().equals(ScriptService.ScriptType.INLINE)) {
//An inner template referring to a filename or id
template = new Template(innerTemplate.getScript(), innerTemplate.getType(),


@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
@ -28,7 +29,6 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.EnumSet;
/**
* An Aggregator.
@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
*
* @see {@link AggregatorFactory}
*/
public static interface Parser {
public interface Parser {
/**
* @return The aggregation type this parser is associated with.
@ -134,14 +134,10 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
return parseField;
}
public static SubAggCollectionMode parse(String value) {
return parse(value, ParseField.EMPTY_FLAGS);
}
public static SubAggCollectionMode parse(String value, EnumSet<ParseField.Flag> flags) {
public static SubAggCollectionMode parse(String value, ParseFieldMatcher parseFieldMatcher) {
SubAggCollectionMode[] modes = SubAggCollectionMode.values();
for (SubAggCollectionMode mode : modes) {
if (mode.parseField.match(value, flags)) {
if (parseFieldMatcher.match(value, mode.parseField)) {
return mode;
}
}
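The same fromString(value, parseFieldMatcher) shape recurs below for the ExecutionMode enums of SamplerAggregator, SignificantTermsAggregatorFactory and TermsAggregatorFactory. On the caller side, a string taken from the request resolves through the search context's matcher; a sketch using only the accessors visible in this diff:

import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.internal.SearchContext;

class CollectModeResolution {
    // context.parseFieldMatcher() carries the node's strict/lenient policy
    // into the per-enum lookup instead of a static ParseField.match.
    static SubAggCollectionMode resolve(String text, SearchContext context) {
        return SubAggCollectionMode.parse(text, context.parseFieldMatcher());
    }
}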


@ -60,14 +60,14 @@ public class FiltersParser implements Aggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (OTHER_BUCKET_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, OTHER_BUCKET_FIELD)) {
otherBucket = parser.booleanValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (OTHER_BUCKET_KEY_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, OTHER_BUCKET_KEY_FIELD)) {
otherBucketKey = parser.text();
otherBucket = true;
} else {
@ -75,7 +75,7 @@ public class FiltersParser implements Aggregator.Parser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (FILTERS_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FILTERS_FIELD)) {
keyed = true;
String key = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -91,7 +91,7 @@ public class FiltersParser implements Aggregator.Parser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (FILTERS_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FILTERS_FIELD)) {
keyed = false;
int idx = 0;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {


@ -101,11 +101,11 @@ public class DateHistogramParser implements Aggregator.Parser {
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_STRING) {
if (TIME_ZONE.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, TIME_ZONE)) {
timeZone = DateTimeZone.forID(parser.text());
} else if (OFFSET.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, OFFSET)) {
offset = parseOffset(parser.text());
} else if (INTERVAL.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, INTERVAL)) {
interval = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
@ -139,7 +139,7 @@ public class DateHistogramParser implements Aggregator.Parser {
//TODO should we throw an error if the value is not "asc" or "desc"???
}
}
} else if (EXTENDED_BOUNDS.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, EXTENDED_BOUNDS)) {
extendedBounds = new ExtendedBounds();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {


@ -93,7 +93,7 @@ public class HistogramParser implements Aggregator.Parser {
order = resolveOrder(currentFieldName, asc);
}
}
} else if (EXTENDED_BOUNDS.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, EXTENDED_BOUNDS)) {
extendedBounds = new ExtendedBounds();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {


@ -20,14 +20,9 @@ package org.elasticsearch.search.aggregations.bucket.sampler;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.BestDocsDeferringCollector;
import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
@ -111,9 +106,9 @@ public class SamplerAggregator extends SingleBucketAggregator {
};
public static ExecutionMode fromString(String value) {
public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
for (ExecutionMode mode : values()) {
if (mode.parseField.match(value)) {
if (parseFieldMatcher.match(value, mode.parseField)) {
return mode;
}
}
@ -222,7 +217,7 @@ public class SamplerAggregator extends SingleBucketAggregator {
if (valuesSource instanceof ValuesSource.Bytes) {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint);
execution = ExecutionMode.fromString(executionHint, context.searchContext().parseFieldMatcher());
}
// In some cases using ordinals is just not supported: override


@ -66,9 +66,9 @@ public class SamplerParser implements Aggregator.Parser {
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (SHARD_SIZE_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, SHARD_SIZE_FIELD)) {
shardSize = parser.intValue();
} else if (MAX_DOCS_PER_VALUE_FIELD.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, MAX_DOCS_PER_VALUE_FIELD)) {
diversityChoiceMade = true;
maxDocsPerValue = parser.intValue();
} else {
@ -76,7 +76,7 @@ public class SamplerParser implements Aggregator.Parser {
+ aggregationName, parser.getTokenLocation());
}
} else if (!vsParser.token(currentFieldName, token, parser)) {
if (EXECUTION_HINT_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, EXECUTION_HINT_FIELD)) {
executionHint = parser.text();
} else {
throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].",


@ -25,15 +25,12 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
import org.elasticsearch.common.lucene.index.FreqTermsEnum;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
@ -103,9 +100,9 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
};
public static ExecutionMode fromString(String value) {
public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
for (ExecutionMode mode : values()) {
if (mode.parseField.match(value)) {
if (parseFieldMatcher.match(value, mode.parseField)) {
return mode;
}
}
@ -184,7 +181,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
if (valuesSource instanceof ValuesSource.Bytes) {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint);
execution = ExecutionMode.fromString(executionHint, aggregationContext.searchContext().parseFieldMatcher());
}
if (!(valuesSource instanceof ValuesSource.Bytes.WithOrdinals)) {
execution = ExecutionMode.MAP;


@ -64,8 +64,8 @@ public class SignificantTermsParametersParser extends AbstractTermsParametersPar
if (token == XContentParser.Token.START_OBJECT) {
SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserMapper.get(currentFieldName);
if (significanceHeuristicParser != null) {
significanceHeuristic = significanceHeuristicParser.parse(parser);
} else if (BACKGROUND_FILTER.match(currentFieldName)) {
significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
filter = context.queryParserService().parseInnerFilter(parser).query();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["


@ -23,6 +23,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -114,12 +115,12 @@ public class GND extends NXYSignificanceHeuristic {
}
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
String givenName = parser.currentName();
boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken();
while (!token.equals(XContentParser.Token.END_OBJECT)) {
if (BACKGROUND_IS_SUPERSET.match(parser.currentName(), ParseField.EMPTY_FLAGS)) {
if (parseFieldMatcher.match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
parser.nextToken();
backgroundIsSuperset = parser.booleanValue();
} else {


@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -36,7 +37,7 @@ public class JLHScore extends SignificanceHeuristic {
protected static final String[] NAMES = {"jlh"};
private JLHScore() {};
private JLHScore() {}
public static final SignificanceHeuristicStreams.Stream STREAM = new SignificanceHeuristicStreams.Stream() {
@Override
@ -107,7 +108,7 @@ public class JLHScore extends SignificanceHeuristic {
public static class JLHScoreParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());


@ -23,6 +23,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -137,16 +138,16 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
public static abstract class NXYParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
String givenName = parser.currentName();
boolean includeNegatives = false;
boolean backgroundIsSuperset = true;
XContentParser.Token token = parser.nextToken();
while (!token.equals(XContentParser.Token.END_OBJECT)) {
if (INCLUDE_NEGATIVES_FIELD.match(parser.currentName())) {
if (parseFieldMatcher.match(parser.currentName(), INCLUDE_NEGATIVES_FIELD)) {
parser.nextToken();
includeNegatives = parser.booleanValue();
} else if (BACKGROUND_IS_SUPERSET.match(parser.currentName())) {
} else if (parseFieldMatcher.match(parser.currentName(), BACKGROUND_IS_SUPERSET)) {
parser.nextToken();
backgroundIsSuperset = parser.booleanValue();
} else {


@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -36,7 +37,7 @@ public class PercentageScore extends SignificanceHeuristic {
protected static final String[] NAMES = {"percentage"};
private PercentageScore() {};
private PercentageScore() {}
public static final SignificanceHeuristicStreams.Stream STREAM = new SignificanceHeuristicStreams.Stream() {
@Override
@ -76,7 +77,7 @@ public class PercentageScore extends SignificanceHeuristic {
public static class PercentageScoreParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());


@ -23,6 +23,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -122,15 +123,15 @@ public class ScriptHeuristic extends SignificanceHeuristic {
public static class ScriptHeuristicParser implements SignificanceHeuristicParser {
private final ScriptService scriptService;
@Inject
public ScriptHeuristicParser(ScriptService scriptService) {
this.scriptService = scriptService;
}
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
String heuristicName = parser.currentName();
NAMES_FIELD.match(heuristicName, ParseField.EMPTY_FLAGS);
Script script = null;
XContentParser.Token token;
Map<String, Object> params = null;
@ -140,14 +141,14 @@ public class ScriptHeuristic extends SignificanceHeuristic {
if (token.equals(XContentParser.Token.FIELD_NAME)) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if ("params".equals(currentFieldName)) { // TODO remove in 2.0 (here to support old script APIs)
if (parseFieldMatcher.match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseFieldMatcher);
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
params = parser.map();
} else {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
}
} else if (!scriptParameterParser.token(currentFieldName, token, parser)) {
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseFieldMatcher)) {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", heuristicName, currentFieldName);
}
}


@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;
@ -27,7 +28,7 @@ import java.io.IOException;
public interface SignificanceHeuristicParser {
SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException;
SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException;
String[] getNames();
}
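All shipped heuristic parsers (GND, JLHScore, NXYSignificanceHeuristic, PercentageScore, ScriptHeuristic) are updated above to this widened signature; a third-party parser would follow the same shape. A minimal sketch under that assumption — MyHeuristic and its registration name are hypothetical, not part of the commit:

import java.io.IOException;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParsingException;

public class MyHeuristicParser implements SignificanceHeuristicParser {

    @Override
    public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
            throws IOException, QueryParsingException {
        // Expect an empty object, mirroring the "jlh" and "percentage" parsers above.
        if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
            throw new ElasticsearchParseException("expected an empty object, but found [{}] instead", parser.currentToken());
        }
        return MyHeuristic.INSTANCE; // hypothetical singleton heuristic
    }

    @Override
    public String[] getNames() {
        return new String[] { "my_heuristic" }; // hypothetical name
    }
}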


@ -77,23 +77,23 @@ public abstract class AbstractTermsParametersParser {
} else if (incExcParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_STRING) {
if (EXECUTION_HINT_FIELD_NAME.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, EXECUTION_HINT_FIELD_NAME)) {
executionHint = parser.text();
} else if(SubAggCollectionMode.KEY.match(currentFieldName)){
collectMode = SubAggCollectionMode.parse(parser.text());
} else if (REQUIRED_SIZE_FIELD_NAME.match(currentFieldName)) {
} else if(context.parseFieldMatcher().match(currentFieldName, SubAggCollectionMode.KEY)){
collectMode = SubAggCollectionMode.parse(parser.text(), context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
bucketCountThresholds.setRequiredSize(parser.intValue());
} else {
parseSpecial(aggregationName, parser, context, token, currentFieldName);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (REQUIRED_SIZE_FIELD_NAME.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, REQUIRED_SIZE_FIELD_NAME)) {
bucketCountThresholds.setRequiredSize(parser.intValue());
} else if (SHARD_SIZE_FIELD_NAME.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, SHARD_SIZE_FIELD_NAME)) {
bucketCountThresholds.setShardSize(parser.intValue());
} else if (MIN_DOC_COUNT_FIELD_NAME.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, MIN_DOC_COUNT_FIELD_NAME)) {
bucketCountThresholds.setMinDocCount(parser.intValue());
} else if (SHARD_MIN_DOC_COUNT_FIELD_NAME.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, SHARD_MIN_DOC_COUNT_FIELD_NAME)) {
bucketCountThresholds.setShardMinDocCount(parser.longValue());
} else {
parseSpecial(aggregationName, parser, context, token, currentFieldName);

View File

@ -20,12 +20,9 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
@ -125,9 +122,9 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
};
public static ExecutionMode fromString(String value) {
public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
for (ExecutionMode mode : values()) {
if (mode.parseField.match(value)) {
if (parseFieldMatcher.match(value, mode.parseField)) {
return mode;
}
}
@ -201,7 +198,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
if (valuesSource instanceof ValuesSource.Bytes) {
ExecutionMode execution = null;
if (executionHint != null) {
execution = ExecutionMode.fromString(executionHint);
execution = ExecutionMode.fromString(executionHint, aggregationContext.searchContext().parseFieldMatcher());
}
// In some cases, using ordinals is just not supported: override it
@ -243,7 +240,6 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
}
assert execution != null;
return execution.create(name, factories, valuesSource, order, bucketCountThresholds, includeExclude, aggregationContext,
parent, collectMode, showTermDocCountError, pipelineAggregators, metaData);
}
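
The fromString change above shows a second recurring shape: a static enum lookup that scans the constants and delegates the name comparison to the matcher it is handed. A sketch reusing the FieldSpec/FieldMatcher stand-ins from the earlier example; the mode names and the deprecated spelling are invented.

enum Mode {
    MAP(new FieldSpec("map")),
    GLOBAL_ORDINALS(new FieldSpec("global_ordinals", "ordinals")); // "ordinals" as an invented deprecated spelling

    final FieldSpec field;

    Mode(FieldSpec field) {
        this.field = field;
    }

    // The matching policy arrives as an argument instead of living in static state.
    static Mode fromString(String value, FieldMatcher matcher) {
        for (Mode mode : values()) {
            if (matcher.match(value, mode.field)) {
                return mode;
            }
        }
        throw new IllegalArgumentException("Unknown mode [" + value + "]");
    }
}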

View File

@ -76,7 +76,7 @@ public class TermsParametersParser extends AbstractTermsParametersParser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (SHOW_TERM_DOC_COUNT_ERROR.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, SHOW_TERM_DOC_COUNT_ERROR)) {
showTermDocCountError = parser.booleanValue();
}
} else {

View File

@ -59,7 +59,7 @@ public class CardinalityParser implements Aggregator.Parser {
} else if (token.isValue()) {
if ("rehash".equals(currentFieldName)) {
rehash = parser.booleanValue();
} else if (PRECISION_THRESHOLD.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, PRECISION_THRESHOLD)) {
precisionThreshold = parser.longValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + name + "]: [" + currentFieldName

View File

@ -74,24 +74,24 @@ public class ScriptedMetricParser implements Aggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (INIT_SCRIPT_FIELD.match(currentFieldName)) {
initScript = Script.parse(parser);
} else if (MAP_SCRIPT_FIELD.match(currentFieldName)) {
mapScript = Script.parse(parser);
} else if (COMBINE_SCRIPT_FIELD.match(currentFieldName)) {
combineScript = Script.parse(parser);
} else if (REDUCE_SCRIPT_FIELD.match(currentFieldName)) {
reduceScript = Script.parse(parser);
} else if (PARAMS_FIELD.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, INIT_SCRIPT_FIELD)) {
initScript = Script.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, MAP_SCRIPT_FIELD)) {
mapScript = Script.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, COMBINE_SCRIPT_FIELD)) {
combineScript = Script.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, REDUCE_SCRIPT_FIELD)) {
reduceScript = Script.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
params = parser.map();
} else if (REDUCE_PARAMS_FIELD.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, REDUCE_PARAMS_FIELD)) {
reduceParams = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token.isValue()) {
if (!scriptParameterParser.token(currentFieldName, token, parser)) {
if (!scriptParameterParser.token(currentFieldName, token, parser, context.parseFieldMatcher())) {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}

View File

@ -62,7 +62,7 @@ public class ExtendedStatsParser implements Aggregator.Parser {
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (SIGMA.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, SIGMA)) {
sigma = parser.doubleValue();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: ["

View File

@ -65,7 +65,7 @@ public class BucketHelpers {
public static GapPolicy parse(SearchContext context, String text, XContentLocation tokenLocation) {
GapPolicy result = null;
for (GapPolicy policy : values()) {
if (policy.parseField.match(text)) {
if (context.parseFieldMatcher().match(text, policy.parseField)) {
if (result == null) {
result = policy;
} else {
@ -94,9 +94,6 @@ public class BucketHelpers {
/**
* Serialize the GapPolicy to the output stream
*
* @param out
* @throws IOException
*/
public void writeTo(StreamOutput out) throws IOException {
out.writeByte(id);
@ -136,7 +133,7 @@ public class BucketHelpers {
* bucket). If the bucket is empty, the configured GapPolicy is invoked to
* resolve the missing bucket
*
* @param histo
* @param agg
* A series of agg buckets in the form of a histogram
* @param bucket
* A specific bucket that a value needs to be extracted from.
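
GapPolicy.parse above adds one twist to that lookup: it keeps scanning after a hit so that a text matching two constants is reported as ambiguous instead of resolved to whichever came first. A sketch of the duplicate check, again on the illustrative Mode/FieldMatcher stand-ins.

static Mode parseChecked(String text, FieldMatcher matcher) {
    Mode result = null;
    for (Mode mode : Mode.values()) {
        if (matcher.match(text, mode.field)) {
            if (result == null) {
                result = mode;
            } else {
                throw new IllegalStateException("text [" + text + "] is ambiguous, matching ["
                        + result + "] and [" + mode + "]");
            }
        }
    }
    return result; // may be null; GapPolicy.parse reports unknown values to the caller
}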

View File

@ -59,18 +59,18 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
} else if (doParse(pipelineAggregatorName, currentFieldName, token, parser, context)) {
// Do nothing as subclass has stored the state for this token
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();

View File

@ -61,21 +61,21 @@ public class BucketScriptParser implements PipelineAggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
bucketsPathsMap = new HashMap<>();
bucketsPathsMap.put("_value", parser.text());
} else if (GAP_POLICY.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.parseFieldMatcher());
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();
@ -90,9 +90,9 @@ public class BucketScriptParser implements PipelineAggregator.Parser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
} else if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.parseFieldMatcher());
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
Map<String, Object> map = parser.map();
bucketsPathsMap = new HashMap<>();
for (Map.Entry<String, Object> entry : map.entrySet()) {

View File

@ -54,16 +54,16 @@ public class CumulativeSumParser implements PipelineAggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
bucketsPaths = new String[] { parser.text() };
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();

View File

@ -60,20 +60,20 @@ public class DerivativeParser implements PipelineAggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (UNIT.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, UNIT)) {
units = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();

View File

@ -74,14 +74,14 @@ public class MovAvgParser implements PipelineAggregator.Parser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (WINDOW.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, WINDOW)) {
window = parser.intValue();
if (window <= 0) {
throw new SearchParseException(context, "[" + currentFieldName + "] value must be a positive, "
+ "non-zero integer. Value supplied was [" + predict + "] in [" + pipelineAggregatorName + "].",
parser.getTokenLocation());
}
} else if (PREDICT.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, PREDICT)) {
predict = parser.intValue();
if (predict <= 0) {
throw new SearchParseException(context, "[" + currentFieldName + "] value must be a positive, "
@ -93,20 +93,20 @@ public class MovAvgParser implements PipelineAggregator.Parser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (FORMAT.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
format = parser.text();
} else if (BUCKETS_PATH.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
bucketsPaths = new String[] { parser.text() };
} else if (GAP_POLICY.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
} else if (MODEL.match(currentFieldName)) {
} else if (context.parseFieldMatcher().match(currentFieldName, MODEL)) {
model = parser.text();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (BUCKETS_PATH.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
List<String> paths = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String path = parser.text();
@ -118,7 +118,7 @@ public class MovAvgParser implements PipelineAggregator.Parser {
+ currentFieldName + "].", parser.getTokenLocation());
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (SETTINGS.match(currentFieldName)) {
if (context.parseFieldMatcher().match(currentFieldName, SETTINGS)) {
settings = parser.map();
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
@ -150,7 +150,7 @@ public class MovAvgParser implements PipelineAggregator.Parser {
MovAvgModel movAvgModel;
try {
movAvgModel = modelParser.parse(settings, pipelineAggregatorName, window);
movAvgModel = modelParser.parse(settings, pipelineAggregatorName, window, context.parseFieldMatcher());
} catch (ParseException exception) {
throw new SearchParseException(context, "Could not parse settings for model [" + model + "].", null, exception);
}

View File

@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
@ -103,7 +103,7 @@ public class EwmaModel extends MovAvgModel {
}
@Override
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
double alpha = parseDoubleParam(settings, "alpha", 0.5);

View File

@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
@ -152,7 +152,7 @@ public class HoltLinearModel extends MovAvgModel {
}
@Override
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
double alpha = parseDoubleParam(settings, "alpha", 0.5);
double beta = parseDoubleParam(settings, "beta", 0.5);

View File

@ -23,6 +23,7 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -47,16 +48,17 @@ public class HoltWintersModel extends MovAvgModel {
* Parse a string SeasonalityType into the byte enum
*
* @param text SeasonalityType in string format (e.g. "add")
* @param parseFieldMatcher Matcher for field names
* @return SeasonalityType enum
*/
@Nullable
public static SeasonalityType parse(String text) {
public static SeasonalityType parse(String text, ParseFieldMatcher parseFieldMatcher) {
if (text == null) {
return null;
}
SeasonalityType result = null;
for (SeasonalityType policy : values()) {
if (policy.parseField.match(text)) {
if (parseFieldMatcher.match(text, policy.parseField)) {
result = policy;
break;
}
@ -81,9 +83,6 @@ public class HoltWintersModel extends MovAvgModel {
/**
* Serialize the SeasonalityType to the output stream
*
* @param out
* @throws IOException
*/
public void writeTo(StreamOutput out) throws IOException {
out.writeByte(id);
@ -92,7 +91,7 @@ public class HoltWintersModel extends MovAvgModel {
/**
* Deserialize the SeasonalityType from the input stream
*
* @param in
* @param in the input stream
* @return SeasonalityType Enum
* @throws IOException if an I/O error occurs while reading from the stream
*/
@ -311,7 +310,7 @@ public class HoltWintersModel extends MovAvgModel {
}
@Override
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
double alpha = parseDoubleParam(settings, "alpha", 0.5);
double beta = parseDoubleParam(settings, "beta", 0.5);
@ -330,7 +329,7 @@ public class HoltWintersModel extends MovAvgModel {
Object value = settings.get("type");
if (value != null) {
if (value instanceof String) {
seasonalityType = SeasonalityType.parse((String)value);
seasonalityType = SeasonalityType.parse((String)value, parseFieldMatcher);
} else {
throw new ParseException("Parameter [type] must be a String, type `"
+ value.getClass().getSimpleName() + "` provided instead", 0);
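
The type handling above is the general shape of model-settings parsing: options arrive as an untyped Map<String, Object>, so every read is a null check, an instanceof check, and only then a typed parse. A self-contained sketch of that shape; SettingsReader, the key and the default are illustrative.

import java.text.ParseException;
import java.util.Map;

final class SettingsReader {

    static String readSeasonality(Map<String, Object> settings) throws ParseException {
        Object value = settings.get("type");
        if (value == null) {
            return "add"; // illustrative default
        }
        if (value instanceof String) {
            // the real code hands this on to SeasonalityType.parse(text, parseFieldMatcher)
            return (String) value;
        }
        throw new ParseException("Parameter [type] must be a String, type `"
                + value.getClass().getSimpleName() + "` provided instead", 0);
    }
}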

View File

@ -22,11 +22,11 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
@ -91,7 +91,7 @@ public class LinearModel extends MovAvgModel {
}
@Override
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
return new LinearModel();
}
}

View File

@ -20,9 +20,9 @@
package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
@ -85,8 +85,6 @@ public abstract class MovAvgModel {
/**
* Returns an empty set of predictions, filled with NaNs
* @param numPredictions
* @return
*/
protected double[] emptyPredictions(int numPredictions) {
double[] predictions = new double[numPredictions];
@ -120,9 +118,10 @@ public abstract class MovAvgModel {
* @param settings Map of settings, extracted from the request
* @param pipelineName Name of the parent pipeline agg
* @param windowSize Size of the window for this moving avg
* @param parseFieldMatcher Matcher for field names
* @return A fully built moving average model
*/
public abstract MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException;
public abstract MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException;
/**

View File

@ -21,11 +21,11 @@ package org.elasticsearch.search.aggregations.pipeline.movavg.models;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgParser;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
@ -84,7 +84,7 @@ public class SimpleModel extends MovAvgModel {
}
@Override
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize) throws ParseException {
public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName, int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
return new SimpleModel();
}
}
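
SimpleModel and LinearModel accept the new parseFieldMatcher argument but have nothing to match with it; only models with named sub-options, such as HoltWintersModel's seasonality type, actually consult it. A hypothetical stateless model showing the minimal cost of the signature change; MyFlatModel is invented, and MovAvgModel's other abstract methods are elided.

import java.text.ParseException;
import java.util.Map;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;

public class MyFlatModel extends MovAvgModel {

    @Override
    public MovAvgModel parse(@Nullable Map<String, Object> settings, String pipelineName,
            int windowSize, ParseFieldMatcher parseFieldMatcher) throws ParseException {
        // no named settings to match, so the matcher goes unused, as in SimpleModel above
        return new MyFlatModel();
    }

    // windowing, prediction and serialization methods required by MovAvgModel elided
}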

View File

@ -48,7 +48,7 @@ public class GeoPointParser {
}
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException {
if (!field.match(currentFieldName)) {
if (!context.parseFieldMatcher().match(currentFieldName, field)) {
return false;
}
if (token == XContentParser.Token.VALUE_STRING) {

View File

@ -70,7 +70,7 @@ public class ValuesSourceParser<VS extends ValuesSource> {
String field = null;
Script script = null;
@Deprecated
Map<String, Object> params = null; // TODO Remove in 2.0
Map<String, Object> params = null; // TODO Remove in 3.0
ValueType valueType = null;
String format = null;
Object missing = null;
@ -114,7 +114,7 @@ public class ValuesSourceParser<VS extends ValuesSource> {
"] aggregation can only work on value of type [" + targetValueType + "]",
parser.getTokenLocation());
}
} else if (!scriptParameterParser.token(currentFieldName, token, parser)) {
} else if (!scriptParameterParser.token(currentFieldName, token, parser, context.parseFieldMatcher())) {
return false;
}
return true;
@ -124,8 +124,8 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return true;
}
if (scriptable && token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
input.script = Script.parse(parser);
if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
input.script = Script.parse(parser, context.parseFieldMatcher());
return true;
} else if ("params".equals(currentFieldName)) {
input.params = parser.map();

View File

@ -67,8 +67,8 @@ public class ScriptFieldsParseElement implements SearchParseElement {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentFieldName)) {
script = Script.parse(parser);
if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.parseFieldMatcher());
} else if ("params".equals(currentFieldName)) {
params = parser.map();
}
@ -76,7 +76,7 @@ public class ScriptFieldsParseElement implements SearchParseElement {
if ("ignore_failure".equals(currentFieldName)) {
ignoreException = parser.booleanValue();
} else {
scriptParameterParser.token(currentFieldName, token, parser);
scriptParameterParser.token(currentFieldName, token, parser, context.parseFieldMatcher());
}
}
}

View File

@ -22,22 +22,12 @@ package org.elasticsearch.search.internal;
import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
@ -74,10 +64,7 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.scan.ScanContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.*;
/**
*
@ -140,7 +127,8 @@ public class DefaultSearchContext extends SearchContext {
public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard,
ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, Counter timeEstimateCounter) {
BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher) {
super(parseFieldMatcher);
this.id = id;
this.request = request;
this.searchType = request.searchType();
@ -368,8 +356,6 @@ public class DefaultSearchContext extends SearchContext {
/**
* A shortcut function to see whether there is a fetchSourceContext and, if so, whether it says the source is requested.
*
* @return
*/
@Override
public boolean sourceRequested() {

View File

@ -26,9 +26,7 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
@ -61,13 +59,13 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.util.List;
import java.util.Set;
/**
*/
public abstract class FilteredSearchContext extends SearchContext {
private final SearchContext in;
public FilteredSearchContext(SearchContext in) {
// inner_hits in percolator ends up with a null inner search context
super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher());
this.in = in;
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
@ -65,8 +66,6 @@ import java.util.Collection;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
*/
public abstract class SearchContext implements Releasable, HasContextAndHeaders {
private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
@ -89,6 +88,16 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
private Multimap<Lifetime, Releasable> clearables = null;
private final AtomicBoolean closed = new AtomicBoolean(false);
protected final ParseFieldMatcher parseFieldMatcher;
protected SearchContext(ParseFieldMatcher parseFieldMatcher) {
this.parseFieldMatcher = parseFieldMatcher;
}
public ParseFieldMatcher parseFieldMatcher() {
return parseFieldMatcher;
}
@Override
public final void close() {
if (closed.compareAndSet(false, true)) { // prevent double release
@ -181,8 +190,6 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
/**
* A shortcut function to see whether there is a fetchSourceContext and, if so, whether it says the source is requested.
*
* @return
*/
public abstract boolean sourceRequested();
@ -315,7 +322,7 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public abstract FetchSearchResult fetchResult();
/**
* Schedule the release of a resource. The time when {@link Releasable#release()} will be called on this object
* Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object
* is function of the provided {@link Lifetime}.
*/
public void addReleasable(Releasable releasable, Lifetime lifetime) {
@ -366,6 +373,6 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
/**
* This life time is for objects that need to live until the search context they are attached to is destroyed.
*/
CONTEXT;
CONTEXT
}
}
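
The SearchContext change above anchors the whole refactoring: the matcher is injected once through the base-class constructor, DefaultSearchContext supplies it, and FilteredSearchContext delegates to the wrapped context, falling back to ParseFieldMatcher.EMPTY when the delegate is null. A self-contained sketch of that wiring on the illustrative FieldMatcher type; Context, ConcreteContext and WrappedContext are stand-ins.

abstract class Context {
    protected final FieldMatcher matcher;

    protected Context(FieldMatcher matcher) {
        this.matcher = matcher;
    }

    public FieldMatcher fieldMatcher() {
        return matcher; // every parser reads the policy from here
    }
}

class ConcreteContext extends Context {
    ConcreteContext(FieldMatcher matcher) {
        super(matcher); // supplied once at construction, as DefaultSearchContext does
    }
}

class WrappedContext extends Context {
    private final Context in;

    WrappedContext(Context in) {
        // tolerate a null delegate, as FilteredSearchContext does above
        super(in == null ? FieldMatcher.LENIENT : in.fieldMatcher());
        this.in = in;
    }
}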

View File

@ -84,8 +84,8 @@ public class ScriptSortParser implements SortParser {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (ScriptField.SCRIPT.match(currentName)) {
script = Script.parse(parser);
if (context.parseFieldMatcher().match(currentName, ScriptField.SCRIPT)) {
script = Script.parse(parser, context.parseFieldMatcher());
} else if ("params".equals(currentName)) {
params = parser.map();
} else if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
@ -99,7 +99,7 @@ public class ScriptSortParser implements SortParser {
reverse = parser.booleanValue();
} else if ("order".equals(currentName)) {
reverse = "desc".equals(parser.text());
} else if (scriptParameterParser.token(currentName, token, parser)) {
} else if (scriptParameterParser.token(currentName, token, parser, context.parseFieldMatcher())) {
// Do nothing (handled by ScriptParameterParser)
} else if ("type".equals(currentName)) {
type = parser.text();

View File

@ -157,13 +157,13 @@ public class SortParseElement implements SearchParseElement {
}
} else if ("missing".equals(innerJsonName)) {
missing = parser.textOrNull();
} else if (IGNORE_UNMAPPED.match(innerJsonName)) {
} else if (context.parseFieldMatcher().match(innerJsonName, IGNORE_UNMAPPED)) {
// backward compatibility: ignore_unmapped has been replaced with unmapped_type
if (unmappedType == null // don't override if unmapped_type has been provided too
&& parser.booleanValue()) {
unmappedType = LongFieldMapper.CONTENT_TYPE;
}
} else if (UNMAPPED_TYPE.match(innerJsonName)) {
} else if (context.parseFieldMatcher().match(innerJsonName, UNMAPPED_TYPE)) {
unmappedType = parser.textOrNull();
} else if ("mode".equals(innerJsonName)) {
sortMode = MultiValueMode.fromString(parser.text());

View File

@ -30,6 +30,7 @@ import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.FastCharArrayReader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.CustomAnalyzer;
@ -190,29 +191,29 @@ public final class SuggestUtils {
}
public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName,
DirectSpellcheckerSettings suggestion) throws IOException {
DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
if ("accuracy".equals(fieldName)) {
suggestion.accuracy(parser.floatValue());
} else if (Fields.SUGGEST_MODE.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) {
suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
} else if ("sort".equals(fieldName)) {
suggestion.sort(SuggestUtils.resolveSort(parser.text()));
} else if (Fields.STRING_DISTANCE.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) {
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
} else if (Fields.MAX_EDITS.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) {
suggestion.maxEdits(parser.intValue());
if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
throw new IllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits());
}
} else if (Fields.MAX_INSPECTIONS.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_INSPECTIONS)) {
suggestion.maxInspections(parser.intValue());
} else if (Fields.MAX_TERM_FREQ.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_TERM_FREQ)) {
suggestion.maxTermFreq(parser.floatValue());
} else if (Fields.PREFIX_LENGTH.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) {
suggestion.prefixLength(parser.intValue());
} else if (Fields.MIN_WORD_LENGTH.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) {
suggestion.minQueryLength(parser.intValue());
} else if (Fields.MIN_DOC_FREQ.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) {
suggestion.minDocFreq(parser.floatValue());
} else {
return false;
@ -221,7 +222,7 @@ public final class SuggestUtils {
}
public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName,
SuggestionSearchContext.SuggestionContext suggestion) throws IOException {
SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException {
if ("analyzer".equals(fieldName)) {
String analyzerName = parser.text();
@ -234,7 +235,7 @@ public final class SuggestUtils {
suggestion.setField(parser.text());
} else if ("size".equals(fieldName)) {
suggestion.setSize(parser.intValue());
} else if (Fields.SHARD_SIZE.match(fieldName)) {
} else if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) {
suggestion.setShardSize(parser.intValue());
} else {
return false;

View File

@ -60,7 +60,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (!parseSuggestContext(parser, mapperService, fieldName, suggestion)) {
if (!parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) {
if (token == XContentParser.Token.VALUE_BOOLEAN && "fuzzy".equals(fieldName)) {
suggestion.setFuzzy(parser.booleanValue());
}
@ -73,7 +73,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fuzzyConfigName = parser.currentName();
} else if (token.isValue()) {
if (FUZZINESS.match(fuzzyConfigName, ParseField.EMPTY_FLAGS)) {
if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, FUZZINESS)) {
suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance());
} else if ("transpositions".equals(fuzzyConfigName)) {
suggestion.setFuzzyTranspositions(parser.booleanValue());

View File

@ -22,10 +22,10 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
@ -58,7 +58,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion)) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, queryParserService.parseFieldMatcher())) {
if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
suggestion.setRealWordErrorLikelihood(parser.floatValue());
if (suggestion.realworldErrorLikelyhood() <= 0.0) {
@ -104,7 +104,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
fieldName = parser.currentName();
}
if (token.isValue()) {
parseCandidateGenerator(parser, mapperService, fieldName, generator);
parseCandidateGenerator(parser, mapperService, fieldName, generator, queryParserService.parseFieldMatcher());
}
}
verifyGenerator(generator);
@ -139,7 +139,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (suggestion.getCollateQueryScript() != null) {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
Template template = Template.parse(parser);
Template template = Template.parse(parser, queryParserService.parseFieldMatcher());
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH);
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
@ -321,8 +321,8 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}
private void parseCandidateGenerator(XContentParser parser, MapperService mapperService, String fieldName,
PhraseSuggestionContext.DirectCandidateGenerator generator) throws IOException {
if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator)) {
PhraseSuggestionContext.DirectCandidateGenerator generator, ParseFieldMatcher parseFieldMatcher) throws IOException {
if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
if ("field".equals(fieldName)) {
generator.setField(parser.text());
if (mapperService.smartNameFieldType(generator.field()) == null) {

View File

@ -18,8 +18,7 @@
*/
package org.elasticsearch.search.suggest.term;
import java.io.IOException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
@ -28,6 +27,8 @@ import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
public final class TermSuggestParser implements SuggestContextParser {
private TermSuggester suggester;
@ -46,7 +47,7 @@ public final class TermSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
parseTokenValue(parser, mapperService, fieldName, suggestion, settings);
parseTokenValue(parser, mapperService, fieldName, suggestion, settings, queryParserService.parseFieldMatcher());
} else {
throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]");
}
@ -55,9 +56,9 @@ public final class TermSuggestParser implements SuggestContextParser {
}
private void parseTokenValue(XContentParser parser, MapperService mapperService, String fieldName, TermSuggestionContext suggestion,
DirectSpellcheckerSettings settings) throws IOException {
if (!(SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion) || SuggestUtils.parseDirectSpellcheckerSettings(
parser, fieldName, settings))) {
DirectSpellcheckerSettings settings, ParseFieldMatcher parseFieldMatcher) throws IOException {
if (!(SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher) || SuggestUtils.parseDirectSpellcheckerSettings(
parser, fieldName, settings, parseFieldMatcher))) {
throw new IllegalArgumentException("suggester[term] doesn't support [" + fieldName + "]");
}

View File

@ -118,7 +118,7 @@ public class TransportClientNodesServiceTests extends ElasticsearchTestCase {
throw new IllegalArgumentException();
}
iteration.transportService.sendRequest(node, "action", new TestRequest(), new TransportRequestOptions().withTimeout(50), new BaseTransportResponseHandler<TestResponse>() {
iteration.transportService.sendRequest(node, "action", new TestRequest(), new TransportRequestOptions(), new BaseTransportResponseHandler<TestResponse>() {
@Override
public TestResponse newInstance() {
return new TestResponse();

View File

@ -20,6 +20,7 @@
package org.elasticsearch.script;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.ToXContent.MapParams;
import org.elasticsearch.common.xcontent.XContentHelper;
@ -32,12 +33,7 @@ import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
@ -52,15 +48,15 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
ScriptParameterParser paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
paramParser = new ScriptParameterParser(null);
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
paramParser = new ScriptParameterParser(new HashSet<String>());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
}
@ -73,7 +69,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
ScriptParameterParser paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
@ -83,7 +79,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
}
@ -96,7 +92,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
ScriptParameterParser paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
@ -106,7 +102,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
}
@ -119,7 +115,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
token = parser.nextToken();
}
ScriptParameterParser paramParser = new ScriptParameterParser();
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(false));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false));
assertThat(paramParser.getDefaultScriptParameterValue(), nullValue());
assertThat(paramParser.getScriptParameterValue("script"), nullValue());
assertThat(paramParser.lang(), nullValue());
@ -135,7 +131,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
}
@ -150,7 +146,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
}
@ -165,7 +161,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
}
@ -180,14 +176,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -200,14 +196,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -220,14 +216,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -240,14 +236,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -260,14 +256,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -280,14 +276,14 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
token = parser.nextToken();
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
paramParser.token(parser.currentName(), parser.currentToken(), parser);
paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT);
}
@Test
@ -308,7 +304,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -319,7 +315,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -330,7 +326,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -357,7 +353,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -368,7 +364,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -379,7 +375,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -390,7 +386,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -417,7 +413,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(false));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false));
assertThat(paramParser.getScriptParameterValue("other"), nullValue());
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
@ -447,7 +443,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -460,7 +456,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(false));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -473,7 +469,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
while (token != Token.VALUE_STRING) {
token = parser.nextToken();
}
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(true));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true));
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -498,7 +494,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser), equalTo(false));
assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false));
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
@ -518,21 +514,21 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Map<String, Object> config = new HashMap<>();
config.put("script", "scriptValue");
ScriptParameterParser paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
config = new HashMap<>();
config.put("script", "scriptValue");
paramParser = new ScriptParameterParser(null);
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
config = new HashMap<>();
config.put("script", "scriptValue");
paramParser = new ScriptParameterParser(new HashSet<String>());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -543,7 +539,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Map<String, Object> config = new HashMap<>();
config.put("script_file", "scriptValue");
ScriptParameterParser paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -551,7 +547,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
config = new HashMap<>();
config.put("scriptFile", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -562,7 +558,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Map<String, Object> config = new HashMap<>();
config.put("script_id", "scriptValue");
ScriptParameterParser paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -570,7 +566,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
config = new HashMap<>();
config.put("scriptId", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -581,7 +577,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Map<String, Object> config = new HashMap<>();
config.put("script_id", "scriptValue");
ScriptParameterParser paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, false);
paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.size(), equalTo(1));
@ -590,7 +586,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
config = new HashMap<>();
config.put("scriptId", "scriptValue");
paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, false);
paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT);
assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.size(), equalTo(1));
@ -602,7 +598,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Map<String, Object> config = new HashMap<>();
config.put("foo", "bar");
ScriptParameterParser paramParser = new ScriptParameterParser();
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertThat(paramParser.getDefaultScriptParameterValue(), nullValue());
assertThat(paramParser.getScriptParameterValue("script"), nullValue());
assertThat(paramParser.lang(), nullValue());
@ -617,7 +613,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -630,7 +626,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -643,7 +639,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INDEXED);
assertThat(paramParser.lang(), nullValue());
assertThat(config.isEmpty(), equalTo(true));
@ -657,7 +653,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -668,7 +664,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -679,7 +675,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -690,7 +686,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -701,7 +697,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -712,7 +708,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
Set<String> parameters = Collections.singleton("foo");
ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test
@ -732,7 +728,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -760,7 +756,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -788,7 +784,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, false);
paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -817,7 +813,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertThat(paramParser.getScriptParameterValue("other"), nullValue());
assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
@ -848,7 +844,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("other"), nullValue());
assertThat(paramParser.getScriptParameterValue("other_file"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.INDEXED);
@ -879,7 +875,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -900,7 +896,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -921,7 +917,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test(expected = ScriptParseException.class)
@ -942,7 +938,7 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
assertThat(paramParser.lang(), nullValue());
paramParser.parseConfig(config, true);
paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
}
@Test
@ -1265,5 +1261,4 @@ public class ScriptParameterParserTest extends ElasticsearchTestCase {
assertThat(value.scriptType(), equalTo(expectedScriptType));
assertThat(value.script(), equalTo(expectedScript));
}
}
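Taken together, the changes in this test follow one pattern: field-name leniency is no longer implicit parser state but an explicit ParseFieldMatcher argument to both token() and parseConfig(). A condensed sketch of the new contract, not part of the commit; the JSON body and parameter name are illustrative only:

@Test
public void testExplicitMatcherSketch() throws IOException {
    // Token-based parsing: position the parser on the value, then hand over the matcher.
    XContentParser parser = XContentFactory.xContent(XContentType.JSON)
            .createParser("{ \"foo\" : \"scriptValue\" }");
    ScriptParameterParser paramParser = new ScriptParameterParser(Collections.singleton("foo"));
    Token token = parser.nextToken();
    while (token != Token.VALUE_STRING) {
        token = parser.nextToken();
    }
    assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser,
            ParseFieldMatcher.STRICT), equalTo(true));

    // Map-based parsing: the same explicit matcher; consumed keys are removed from the map.
    Map<String, Object> config = new HashMap<>();
    config.put("script", "scriptValue");
    new ScriptParameterParser().parseConfig(config, true, ParseFieldMatcher.STRICT);
    assertThat(config.isEmpty(), equalTo(true));
}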

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
@ -43,16 +44,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantStrin
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.TransportSignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
@ -60,12 +52,7 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
@ -73,10 +60,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.*;
/**
*
@ -234,7 +218,7 @@ public class SignificantTermsSignificanceScoreTests extends ElasticsearchIntegra
public static class SimpleHeuristicParser implements SignificanceHeuristicParser {
@Override
public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
parser.nextToken();
return new SimpleHeuristic();
}
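The hunk above shows only the changed parse method. A hedged reconstruction of the full class under the new signature, assuming the interface also declares a getNames() hook for DSL registration (the name "simple" is a placeholder):

public static class SimpleHeuristicParser implements SignificanceHeuristicParser {

    @Override
    public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
            throws IOException, QueryParsingException {
        parser.nextToken(); // the heuristic object carries no settings, so just step past it
        return new SimpleHeuristic();
    }

    @Override
    public String[] getNames() {
        return new String[] { "simple" }; // placeholder DSL name
    }
}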

View File

@ -20,17 +20,16 @@
package org.elasticsearch.search.aggregations.pipeline.moving.avg;
import com.google.common.collect.EvictingQueue;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.search.aggregations.pipeline.movavg.models.*;
import org.elasticsearch.test.ElasticsearchTestCase;
import static org.hamcrest.Matchers.equalTo;
import org.junit.Test;
import java.text.ParseException;
import java.util.*;
import static org.hamcrest.Matchers.equalTo;
public class MovAvgUnitTests extends ElasticsearchTestCase {
@Test
@ -519,7 +518,7 @@ public class MovAvgUnitTests extends ElasticsearchTestCase {
}
int seasonCounter = (windowSize - 1) - period;
double expected = s + (0 * b) + seasonal[seasonCounter % windowSize];;
double expected = s + (0 * b) + seasonal[seasonCounter % windowSize];
double actual = model.next(window);
assertThat(Double.compare(expected, actual), equalTo(0));
}
@ -619,7 +618,7 @@ public class MovAvgUnitTests extends ElasticsearchTestCase {
settings.put("gamma", v);
try {
parser.parse(settings, "pipeline", 10);
parser.parse(settings, "pipeline", 10, ParseFieldMatcher.STRICT);
} catch (ParseException e) {
fail(parser.getName() + " parser should not have thrown SearchParseException while parsing [" +
v.getClass().getSimpleName() +"]");
@ -634,7 +633,7 @@ public class MovAvgUnitTests extends ElasticsearchTestCase {
settings.put("gamma", "abc");
try {
parser.parse(settings, "pipeline", 10);
parser.parse(settings, "pipeline", 10, ParseFieldMatcher.STRICT);
} catch (ParseException e) {
//all good
continue;
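The moving-average model parsers follow the same convention: settings validation receives the matcher explicitly and reports bad values as ParseException. A condensed sketch of the negative path exercised above, where parser stands in for any registered model parser (assumed to extend MovAvgModel.AbstractModelParser):

Map<String, Object> settings = new HashMap<>();
settings.put("gamma", "abc"); // deliberately non-numeric
try {
    parser.parse(settings, "pipeline", 10, ParseFieldMatcher.STRICT);
    fail(parser.getName() + " should have rejected a non-numeric gamma");
} catch (ParseException e) {
    // expected: invalid settings surface as ParseException
}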

View File

@ -27,9 +27,7 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
@ -85,6 +83,7 @@ public class TestSearchContext extends SearchContext {
private SearchContextAggregations aggregations;
public TestSearchContext(ThreadPool threadPool, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService, QueryCache filterCache, IndexFieldDataService indexFieldDataService) {
super(ParseFieldMatcher.STRICT);
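// the matcher is fixed once at construction; STRICT makes tests reject deprecated field names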
this.pageCacheRecycler = pageCacheRecycler;
this.bigArrays = bigArrays.withCircuitBreaking();
this.indexService = indexService;
@ -94,6 +93,7 @@ public class TestSearchContext extends SearchContext {
}
public TestSearchContext() {
super(ParseFieldMatcher.STRICT);
this.pageCacheRecycler = null;
this.bigArrays = null;
this.indexService = null;

View File

@ -26,3 +26,6 @@
* https://github.com/kodcu/pes[Pes]:
A pluggable elastic query DSL builder for Elasticsearch
* https://github.com/ozlerhakan/mongolastic[Mongolastic]:
A tool that clones data from Elasticsearch to MongoDB and vice versa

View File

@ -25,11 +25,11 @@ Download and install the Public Signing Key:
wget -qO - https://packages.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
--------------------------------------------------
Add the repository definition to your `/etc/apt/sources.list` file:
Save the repository definition to `/etc/apt/sources.list.d/elasticsearch-{branch}.list`:
["source","sh",subs="attributes,callouts"]
--------------------------------------------------
echo "deb http://packages.elastic.co/elasticsearch/{branch}/debian stable main" | sudo tee -a /etc/apt/sources.list
echo "deb http://packages.elastic.co/elasticsearch/{branch}/debian stable main" | sudo tee -a /etc/apt/sources.list.d/elasticsearch-{branch}.list
--------------------------------------------------
[WARNING]
@ -51,6 +51,17 @@ Run apt-get update and the repository is ready for use. You can install it with:
sudo apt-get update && sudo apt-get install elasticsearch
--------------------------------------------------
[WARNING]
==================================================
If two entries exist for the same Elasticsearch repository, you will see an error like this during `apt-get update`:
["literal",subs="attributes,callouts"]
Duplicate sources.list entry http://packages.elastic.co/elasticsearch/{branch}/debian/ ...
Examine +/etc/apt/sources.list.d/elasticsearch-{branch}.list+ for the duplicate entry, or locate it amongst the files in `/etc/apt/sources.list.d/` and in `/etc/apt/sources.list`.
==================================================
Configure Elasticsearch to start automatically at boot. If your distribution uses SysV init, you will need to run: