Remove some more usages of ParseFieldMatcher in favour of using ParseField directly
Relates to #19552
Relates to #22130
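Every hunk below applies the same mechanical change: instead of routing the comparison through the injected ParseFieldMatcher, the ParseField constant now matches the incoming field name itself. A minimal sketch of the pattern, assuming the ParseField API visible in these hunks; the class and constant names are illustrative and are not part of this commit:

    import org.elasticsearch.common.ParseField;

    class FooParserSketch {
        // Illustrative constant; the real ParseField constants vary per file below.
        private static final ParseField FOO_FIELD = new ParseField("foo");

        static boolean isFoo(String currentFieldName) {
            // Before: the caller went through the injected matcher:
            //     parseFieldMatcher.match(currentFieldName, FOO_FIELD)
            // After: the ParseField matches the name directly and, as the comment in
            // the ParseFieldRegistry hunk notes, still drives deprecation warnings.
            return FOO_FIELD.match(currentFieldName);
        }
    }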
parent 6c54cbade4
commit df2acb3d9d

@@ -103,7 +103,7 @@ public class ParseFieldRegistry<T> {
         }
         ParseField parseField = parseFieldAndValue.v1();
         T value = parseFieldAndValue.v2();
-        boolean match = parseFieldMatcher.match(name, parseField);
+        boolean match = parseField.match(name);
         //this is always expected to match, ParseField is useful for deprecation warnings etc. here
         assert match : "ParseField did not match registered name [" + name + "][" + registryName + "]";
         return value;

@@ -359,15 +359,15 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (currentFieldName != null) {
-            if (parseFieldMatcher.match(currentFieldName, Field.INDEX)) {
+            if (Field.INDEX.match(currentFieldName)) {
                 item.index = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, Field.TYPE)) {
+            } else if (Field.TYPE.match(currentFieldName)) {
                 item.type = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, Field.ID)) {
+            } else if (Field.ID.match(currentFieldName)) {
                 item.id = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, Field.DOC)) {
+            } else if (Field.DOC.match(currentFieldName)) {
                 item.doc = jsonBuilder().copyCurrentStructure(parser).bytes();
-            } else if (parseFieldMatcher.match(currentFieldName, Field.FIELDS)) {
+            } else if (Field.FIELDS.match(currentFieldName)) {
                 if (token == XContentParser.Token.START_ARRAY) {
                     List<String> fields = new ArrayList<>();
                     while (parser.nextToken() != XContentParser.Token.END_ARRAY) {

@@ -378,7 +378,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
                    throw new ElasticsearchParseException(
                        "failed to parse More Like This item. field [fields] must be an array");
                 }
-            } else if (parseFieldMatcher.match(currentFieldName, Field.PER_FIELD_ANALYZER)) {
+            } else if (Field.PER_FIELD_ANALYZER.match(currentFieldName)) {
                 item.perFieldAnalyzer(TermVectorsRequest.readPerFieldAnalyzer(parser.map()));
             } else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
                 item.routing = parser.text();

@@ -55,7 +55,7 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
     }

     public boolean isDeprecatedSetting(String setting) {
-        return this.parseFieldMatcher.match(setting, CACHE) || this.parseFieldMatcher.match(setting, CACHE_KEY);
+        return this.CACHE.match(setting) || this.CACHE_KEY.match(setting);
     }

     /**

@@ -524,24 +524,24 @@ public class BlobStoreIndexShardSnapshot implements ToXContent {
         String currentFieldName = parser.currentName();
         token = parser.nextToken();
         if (token.isValue()) {
-            if (parseFieldMatcher.match(currentFieldName, PARSE_NAME)) {
+            if (PARSE_NAME.match(currentFieldName)) {
                 snapshot = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, PARSE_INDEX_VERSION)) {
+            } else if (PARSE_INDEX_VERSION.match(currentFieldName)) {
                 // The index-version is needed for backward compatibility with v 1.0
                 indexVersion = parser.longValue();
-            } else if (parseFieldMatcher.match(currentFieldName, PARSE_START_TIME)) {
+            } else if (PARSE_START_TIME.match(currentFieldName)) {
                 startTime = parser.longValue();
-            } else if (parseFieldMatcher.match(currentFieldName, PARSE_TIME)) {
+            } else if (PARSE_TIME.match(currentFieldName)) {
                 time = parser.longValue();
-            } else if (parseFieldMatcher.match(currentFieldName, PARSE_NUMBER_OF_FILES)) {
+            } else if (PARSE_NUMBER_OF_FILES.match(currentFieldName)) {
                 numberOfFiles = parser.intValue();
-            } else if (parseFieldMatcher.match(currentFieldName, PARSE_TOTAL_SIZE)) {
+            } else if (PARSE_TOTAL_SIZE.match(currentFieldName)) {
                 totalSize = parser.longValue();
             } else {
                 throw new ElasticsearchParseException("unknown parameter [{}]", currentFieldName);
             }
         } else if (token == XContentParser.Token.START_ARRAY) {
-            if (parseFieldMatcher.match(currentFieldName, PARSE_FILES)) {
+            if (PARSE_FILES.match(currentFieldName)) {
                 while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     indexFiles.add(FileInfo.fromXContent(parser));
                 }

@@ -245,7 +245,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
         String currentFieldName = parser.currentName();
         token = parser.nextToken();
         if (token == XContentParser.Token.START_ARRAY) {
-            if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES) == false) {
+            if (ParseFields.FILES.match(currentFieldName) == false) {
                 throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
             }
             while (parser.nextToken() != XContentParser.Token.END_ARRAY) {

@@ -253,7 +253,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
                 files.put(fileInfo.name(), fileInfo);
             }
         } else if (token == XContentParser.Token.START_OBJECT) {
-            if (parseFieldMatcher.match(currentFieldName, ParseFields.SNAPSHOTS) == false) {
+            if (ParseFields.SNAPSHOTS.match(currentFieldName) == false) {
                 throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
             }
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

@@ -268,7 +268,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
                 if (token == XContentParser.Token.FIELD_NAME) {
                     currentFieldName = parser.currentName();
                     if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
-                        if (parseFieldMatcher.match(currentFieldName, ParseFields.FILES) == false) {
+                        if (ParseFields.FILES.match(currentFieldName) == false) {
                             throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
                         }
                         List<String> fileNames = new ArrayList<>();

@@ -83,9 +83,9 @@ public class RestAnalyzeAction extends BaseRestHandler {
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.TEXT) && token == XContentParser.Token.VALUE_STRING) {
+            } else if (Fields.TEXT.match(currentFieldName) && token == XContentParser.Token.VALUE_STRING) {
                 analyzeRequest.text(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.TEXT) && token == XContentParser.Token.START_ARRAY) {
+            } else if (Fields.TEXT.match(currentFieldName) && token == XContentParser.Token.START_ARRAY) {
                 List<String> texts = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token.isValue() == false) {

@@ -94,11 +94,11 @@ public class RestAnalyzeAction extends BaseRestHandler {
                     texts.add(parser.text());
                 }
                 analyzeRequest.text(texts.toArray(new String[texts.size()]));
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.ANALYZER) && token == XContentParser.Token.VALUE_STRING) {
+            } else if (Fields.ANALYZER.match(currentFieldName) && token == XContentParser.Token.VALUE_STRING) {
                 analyzeRequest.analyzer(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.FIELD) && token == XContentParser.Token.VALUE_STRING) {
+            } else if (Fields.FIELD.match(currentFieldName) && token == XContentParser.Token.VALUE_STRING) {
                 analyzeRequest.field(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.TOKENIZER)) {
+            } else if (Fields.TOKENIZER.match(currentFieldName)) {
                 if (token == XContentParser.Token.VALUE_STRING) {
                     analyzeRequest.tokenizer(parser.text());
                 } else if (token == XContentParser.Token.START_OBJECT) {

@@ -106,7 +106,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
                 } else {
                     throw new IllegalArgumentException(currentFieldName + " should be tokenizer's name or setting");
                 }
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.TOKEN_FILTERS)
+            } else if (Fields.TOKEN_FILTERS.match(currentFieldName)
                     && token == XContentParser.Token.START_ARRAY) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_STRING) {

@@ -118,7 +118,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
                             + " array element should contain filter's name or setting");
                     }
                 }
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS)
+            } else if (Fields.CHAR_FILTERS.match(currentFieldName)
                     && token == XContentParser.Token.START_ARRAY) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_STRING) {

@@ -130,13 +130,13 @@ public class RestAnalyzeAction extends BaseRestHandler {
                             + " array element should contain char filter's name or setting");
                     }
                 }
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.EXPLAIN)) {
+            } else if (Fields.EXPLAIN.match(currentFieldName)) {
                 if (parser.isBooleanValue()) {
                     analyzeRequest.explain(parser.booleanValue());
                 } else {
                     throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'");
                 }
-            } else if (parseFieldMatcher.match(currentFieldName, Fields.ATTRIBUTES) && token == XContentParser.Token.START_ARRAY) {
+            } else if (Fields.ATTRIBUTES.match(currentFieldName) && token == XContentParser.Token.START_ARRAY) {
                 List<String> attributes = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token.isValue() == false) {

@@ -132,7 +132,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
     public static SubAggCollectionMode parse(String value, ParseFieldMatcher parseFieldMatcher) {
         SubAggCollectionMode[] modes = SubAggCollectionMode.values();
         for (SubAggCollectionMode mode : modes) {
-            if (parseFieldMatcher.match(value, mode.parseField)) {
+            if (mode.parseField.match(value)) {
                 return mode;
             }
         }

@@ -134,17 +134,17 @@ public class RangeAggregator extends BucketsAggregator {
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token == XContentParser.Token.VALUE_NUMBER) {
-            if (parseFieldMatcher.match(currentFieldName, FROM_FIELD)) {
+            if (FROM_FIELD.match(currentFieldName)) {
                 from = parser.doubleValue();
-            } else if (parseFieldMatcher.match(currentFieldName, TO_FIELD)) {
+            } else if (TO_FIELD.match(currentFieldName)) {
                 to = parser.doubleValue();
             }
         } else if (token == XContentParser.Token.VALUE_STRING) {
-            if (parseFieldMatcher.match(currentFieldName, FROM_FIELD)) {
+            if (FROM_FIELD.match(currentFieldName)) {
                 fromAsStr = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, TO_FIELD)) {
+            } else if (TO_FIELD.match(currentFieldName)) {
                 toAsStr = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, KEY_FIELD)) {
+            } else if (KEY_FIELD.match(currentFieldName)) {
                 key = parser.text();
             }
         }

@@ -180,17 +180,17 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
         if (token == XContentParser.Token.FIELD_NAME) {
             toOrFromOrKey = parser.currentName();
         } else if (token == XContentParser.Token.VALUE_NUMBER) {
-            if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
+            if (Range.FROM_FIELD.match(toOrFromOrKey)) {
                 from = parser.doubleValue();
-            } else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
+            } else if (Range.TO_FIELD.match(toOrFromOrKey)) {
                 to = parser.doubleValue();
             }
         } else if (token == XContentParser.Token.VALUE_STRING) {
-            if (parseFieldMatcher.match(toOrFromOrKey, Range.KEY_FIELD)) {
+            if (Range.KEY_FIELD.match(toOrFromOrKey)) {
                 key = parser.text();
-            } else if (parseFieldMatcher.match(toOrFromOrKey, Range.FROM_FIELD)) {
+            } else if (Range.FROM_FIELD.match(toOrFromOrKey)) {
                 fromAsStr = parser.text();
-            } else if (parseFieldMatcher.match(toOrFromOrKey, Range.TO_FIELD)) {
+            } else if (Range.TO_FIELD.match(toOrFromOrKey)) {
                 toAsStr = parser.text();
             }
         }

@@ -113,7 +113,7 @@ public class SamplerAggregator extends SingleBucketAggregator {

     public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
         for (ExecutionMode mode : values()) {
-            if (parseFieldMatcher.match(value, mode.parseField)) {
+            if (mode.parseField.match(value)) {
                 return mode;
             }
         }

@@ -291,7 +291,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac

     public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
         for (ExecutionMode mode : values()) {
-            if (parseFieldMatcher.match(value, mode.parseField)) {
+            if (mode.parseField.match(value)) {
                 return mode;
             }
         }

@@ -308,7 +308,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values

     public static ExecutionMode fromString(String value, ParseFieldMatcher parseFieldMatcher) {
         for (ExecutionMode mode : values()) {
-            if (parseFieldMatcher.match(value, mode.parseField)) {
+            if (mode.parseField.match(value)) {
                 return mode;
             }
         }

@@ -112,11 +112,11 @@ public class IncludeExclude implements Writeable, ToXContent {
         } else
         // This "include":{"pattern":"foo.*"} syntax is undocumented since 2.0
         // Regexes should be "include":"foo.*"
-        if (parseFieldMatcher.match(currentFieldName, PATTERN_FIELD)) {
+        if (PATTERN_FIELD.match(currentFieldName)) {
             return new IncludeExclude(parser.text(), null);
-        } else if (parseFieldMatcher.match(currentFieldName, NUM_PARTITIONS_FIELD)) {
+        } else if (NUM_PARTITIONS_FIELD.match(currentFieldName)) {
             numPartitions = parser.intValue();
-        } else if (parseFieldMatcher.match(currentFieldName, PARTITION_FIELD)) {
+        } else if (PARTITION_FIELD.match(currentFieldName)) {
             partition = parser.intValue();
         } else {
             throw new ElasticsearchParseException(

@@ -69,7 +69,7 @@ public class HoltWintersModel extends MovAvgModel {
         }
         SeasonalityType result = null;
         for (SeasonalityType policy : values()) {
-            if (parseFieldMatcher.match(text, policy.parseField)) {
+            if (policy.parseField.match(text)) {
                 result = policy;
                 break;
             }

@@ -148,7 +148,7 @@ public class FetchSourceContext implements Writeable, ToXContent {
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token == XContentParser.Token.START_ARRAY) {
-            if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) {
+            if (INCLUDES_FIELD.match(currentFieldName)) {
                 List<String> includesList = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_STRING) {

@@ -159,7 +159,7 @@ public class FetchSourceContext implements Writeable, ToXContent {
                     }
                 }
                 includes = includesList.toArray(new String[includesList.size()]);
-            } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) {
+            } else if (EXCLUDES_FIELD.match(currentFieldName)) {
                 List<String> excludesList = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_STRING) {

@@ -175,9 +175,9 @@ public class FetchSourceContext implements Writeable, ToXContent {
                         + " in [" + currentFieldName + "].", parser.getTokenLocation());
             }
         } else if (token == XContentParser.Token.VALUE_STRING) {
-            if (parseFieldMatcher.match(currentFieldName, INCLUDES_FIELD)) {
+            if (INCLUDES_FIELD.match(currentFieldName)) {
                 includes = new String[] {parser.text()};
-            } else if (parseFieldMatcher.match(currentFieldName, EXCLUDES_FIELD)) {
+            } else if (EXCLUDES_FIELD.match(currentFieldName)) {
                 excludes = new String[] {parser.text()};
             } else {
                 throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token

@@ -422,7 +422,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>

                 fieldName = currentName;
             } else if (token == XContentParser.Token.START_OBJECT) {
-                if (parseFieldMatcher.match(currentName, NESTED_FILTER_FIELD)) {
+                if (NESTED_FILTER_FIELD.match(currentName)) {
                     nestedFilter = context.parseInnerQueryBuilder();
                 } else {
                     // the json in the format of -> field : { lat : 30, lon : 12 }

@@ -439,27 +439,27 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
                    geoPoints.add(point);
                }
             } else if (token.isValue()) {
-                if (parseFieldMatcher.match(currentName, ORDER_FIELD)) {
+                if (ORDER_FIELD.match(currentName)) {
                     order = SortOrder.fromString(parser.text());
-                } else if (parseFieldMatcher.match(currentName, UNIT_FIELD)) {
+                } else if (UNIT_FIELD.match(currentName)) {
                     unit = DistanceUnit.fromString(parser.text());
-                } else if (parseFieldMatcher.match(currentName, DISTANCE_TYPE_FIELD)) {
+                } else if (DISTANCE_TYPE_FIELD.match(currentName)) {
                     geoDistance = GeoDistance.fromString(parser.text());
-                } else if (parseFieldMatcher.match(currentName, COERCE_FIELD)) {
+                } else if (COERCE_FIELD.match(currentName)) {
                     coerce = parser.booleanValue();
                     if (coerce) {
                         ignoreMalformed = true;
                     }
-                } else if (parseFieldMatcher.match(currentName, IGNORE_MALFORMED_FIELD)) {
+                } else if (IGNORE_MALFORMED_FIELD.match(currentName)) {
                     boolean ignore_malformed_value = parser.booleanValue();
                     if (coerce == false) {
                         ignoreMalformed = ignore_malformed_value;
                     }
-                } else if (parseFieldMatcher.match(currentName, VALIDATION_METHOD_FIELD)) {
+                } else if (VALIDATION_METHOD_FIELD.match(currentName)) {
                     validation = GeoValidationMethod.fromString(parser.text());
-                } else if (parseFieldMatcher.match(currentName, SORTMODE_FIELD)) {
+                } else if (SORTMODE_FIELD.match(currentName)) {
                     sortMode = SortMode.fromString(parser.text());
-                } else if (parseFieldMatcher.match(currentName, NESTED_PATH_FIELD)) {
+                } else if (NESTED_PATH_FIELD.match(currentName)) {
                     nestedPath = parser.text();
                 } else if (token == Token.VALUE_STRING){
                     if (fieldName != null && fieldName.equals(currentName) == false) {

@@ -154,7 +154,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable {
         if (token == XContentParser.Token.FIELD_NAME) {
             fieldName = parser.currentName();
         } else if (token.isValue()) {
-            if (parseFieldMatcher.match(fieldName, GLOBAL_TEXT_FIELD)) {
+            if (GLOBAL_TEXT_FIELD.match(fieldName)) {
                 suggestBuilder.setGlobalText(parser.text());
             } else {
                 throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");

@@ -499,34 +499,34 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token.isValue()) {
-            if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) {
+            if (SuggestionBuilder.ANALYZER_FIELD.match(currentFieldName)) {
                 tmpSuggestion.analyzer(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) {
+            } else if (SuggestionBuilder.FIELDNAME_FIELD.match(currentFieldName)) {
                 fieldname = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) {
+            } else if (SuggestionBuilder.SIZE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.size(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) {
+            } else if (SuggestionBuilder.SHARDSIZE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.shardSize(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.RWE_LIKELIHOOD_FIELD)) {
+            } else if (PhraseSuggestionBuilder.RWE_LIKELIHOOD_FIELD.match(currentFieldName)) {
                 tmpSuggestion.realWordErrorLikelihood(parser.floatValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.CONFIDENCE_FIELD)) {
+            } else if (PhraseSuggestionBuilder.CONFIDENCE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.confidence(parser.floatValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.SEPARATOR_FIELD)) {
+            } else if (PhraseSuggestionBuilder.SEPARATOR_FIELD.match(currentFieldName)) {
                 tmpSuggestion.separator(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.MAXERRORS_FIELD)) {
+            } else if (PhraseSuggestionBuilder.MAXERRORS_FIELD.match(currentFieldName)) {
                 tmpSuggestion.maxErrors(parser.floatValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.GRAMSIZE_FIELD)) {
+            } else if (PhraseSuggestionBuilder.GRAMSIZE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.gramSize(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.FORCE_UNIGRAM_FIELD)) {
+            } else if (PhraseSuggestionBuilder.FORCE_UNIGRAM_FIELD.match(currentFieldName)) {
                 tmpSuggestion.forceUnigrams(parser.booleanValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.TOKEN_LIMIT_FIELD)) {
+            } else if (PhraseSuggestionBuilder.TOKEN_LIMIT_FIELD.match(currentFieldName)) {
                 tmpSuggestion.tokenLimit(parser.intValue());
             } else {
                 throw new ParsingException(parser.getTokenLocation(),
                     "suggester[phrase] doesn't support field [" + currentFieldName + "]");
             }
         } else if (token == Token.START_ARRAY) {
-            if (parseFieldMatcher.match(currentFieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) {
+            if (DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD.match(currentFieldName)) {
                 // for now we only have a single type of generators
                 while ((token = parser.nextToken()) == Token.START_OBJECT) {
                     tmpSuggestion.addCandidateGenerator(DirectCandidateGeneratorBuilder.fromXContent(parseContext));

@@ -536,19 +536,19 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
                    "suggester[phrase] doesn't support array field [" + currentFieldName + "]");
             }
         } else if (token == Token.START_OBJECT) {
-            if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.SMOOTHING_MODEL_FIELD)) {
+            if (PhraseSuggestionBuilder.SMOOTHING_MODEL_FIELD.match(currentFieldName)) {
                 ensureNoSmoothing(tmpSuggestion);
                 tmpSuggestion.smoothingModel(SmoothingModel.fromXContent(parseContext));
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.HIGHLIGHT_FIELD)) {
+            } else if (PhraseSuggestionBuilder.HIGHLIGHT_FIELD.match(currentFieldName)) {
                 String preTag = null;
                 String postTag = null;
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                     if (token == XContentParser.Token.FIELD_NAME) {
                         currentFieldName = parser.currentName();
                     } else if (token.isValue()) {
-                        if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.PRE_TAG_FIELD)) {
+                        if (PhraseSuggestionBuilder.PRE_TAG_FIELD.match(currentFieldName)) {
                             preTag = parser.text();
-                        } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.POST_TAG_FIELD)) {
+                        } else if (PhraseSuggestionBuilder.POST_TAG_FIELD.match(currentFieldName)) {
                             postTag = parser.text();
                         } else {
                             throw new ParsingException(parser.getTokenLocation(),

@@ -557,11 +557,11 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
                    }
                }
                 tmpSuggestion.highlight(preTag, postTag);
-            } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.COLLATE_FIELD)) {
+            } else if (PhraseSuggestionBuilder.COLLATE_FIELD.match(currentFieldName)) {
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                     if (token == XContentParser.Token.FIELD_NAME) {
                         currentFieldName = parser.currentName();
-                    } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.COLLATE_QUERY_FIELD)) {
+                    } else if (PhraseSuggestionBuilder.COLLATE_QUERY_FIELD.match(currentFieldName)) {
                         if (tmpSuggestion.collateQuery() != null) {
                             throw new ParsingException(parser.getTokenLocation(),
                                 "suggester[phrase][collate] query already set, doesn't support additional ["

@@ -569,9 +569,9 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
                        }
                         Script template = Script.parse(parser, parseFieldMatcher, "mustache");
                         tmpSuggestion.collateQuery(template);
-                    } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.COLLATE_QUERY_PARAMS)) {
+                    } else if (PhraseSuggestionBuilder.COLLATE_QUERY_PARAMS.match(currentFieldName)) {
                         tmpSuggestion.collateParams(parser.map());
-                    } else if (parseFieldMatcher.match(currentFieldName, PhraseSuggestionBuilder.COLLATE_QUERY_PRUNE)) {
+                    } else if (PhraseSuggestionBuilder.COLLATE_QUERY_PRUNE.match(currentFieldName)) {
                         if (parser.isBooleanValue()) {
                             tmpSuggestion.collatePrune(parser.booleanValue());
                         } else {

@@ -75,11 +75,11 @@ public abstract class SmoothingModel implements NamedWriteable, ToXContent {
         if (token == XContentParser.Token.FIELD_NAME) {
             fieldName = parser.currentName();
         } else if (token == XContentParser.Token.START_OBJECT) {
-            if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
+            if (LinearInterpolation.PARSE_FIELD.match(fieldName)) {
                 model = LinearInterpolation.innerFromXContent(parseContext);
-            } else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
+            } else if (Laplace.PARSE_FIELD.match(fieldName)) {
                 model = Laplace.innerFromXContent(parseContext);
-            } else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
+            } else if (StupidBackoff.PARSE_FIELD.match(fieldName)) {
                 model = StupidBackoff.innerFromXContent(parseContext);
             } else {
                 throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");

@@ -399,33 +399,33 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token.isValue()) {
-            if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) {
+            if (SuggestionBuilder.ANALYZER_FIELD.match(currentFieldName)) {
                 tmpSuggestion.analyzer(parser.text());
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) {
+            } else if (SuggestionBuilder.FIELDNAME_FIELD.match(currentFieldName)) {
                 fieldname = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) {
+            } else if (SuggestionBuilder.SIZE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.size(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) {
+            } else if (SuggestionBuilder.SHARDSIZE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.shardSize(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, SUGGESTMODE_FIELD)) {
+            } else if (SUGGESTMODE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.suggestMode(SuggestMode.resolve(parser.text()));
-            } else if (parseFieldMatcher.match(currentFieldName, ACCURACY_FIELD)) {
+            } else if (ACCURACY_FIELD.match(currentFieldName)) {
                 tmpSuggestion.accuracy(parser.floatValue());
-            } else if (parseFieldMatcher.match(currentFieldName, SORT_FIELD)) {
+            } else if (SORT_FIELD.match(currentFieldName)) {
                 tmpSuggestion.sort(SortBy.resolve(parser.text()));
-            } else if (parseFieldMatcher.match(currentFieldName, STRING_DISTANCE_FIELD)) {
+            } else if (STRING_DISTANCE_FIELD.match(currentFieldName)) {
                 tmpSuggestion.stringDistance(StringDistanceImpl.resolve(parser.text()));
-            } else if (parseFieldMatcher.match(currentFieldName, MAX_EDITS_FIELD)) {
+            } else if (MAX_EDITS_FIELD.match(currentFieldName)) {
                 tmpSuggestion.maxEdits(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, MAX_INSPECTIONS_FIELD)) {
+            } else if (MAX_INSPECTIONS_FIELD.match(currentFieldName)) {
                 tmpSuggestion.maxInspections(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, MAX_TERM_FREQ_FIELD)) {
+            } else if (MAX_TERM_FREQ_FIELD.match(currentFieldName)) {
                 tmpSuggestion.maxTermFreq(parser.floatValue());
-            } else if (parseFieldMatcher.match(currentFieldName, PREFIX_LENGTH_FIELD)) {
+            } else if (PREFIX_LENGTH_FIELD.match(currentFieldName)) {
                 tmpSuggestion.prefixLength(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, MIN_WORD_LENGTH_FIELD)) {
+            } else if (MIN_WORD_LENGTH_FIELD.match(currentFieldName)) {
                 tmpSuggestion.minWordLength(parser.intValue());
-            } else if (parseFieldMatcher.match(currentFieldName, MIN_DOC_FREQ_FIELD)) {
+            } else if (MIN_DOC_FREQ_FIELD.match(currentFieldName)) {
                 tmpSuggestion.minDocFreq(parser.floatValue());
             } else {
                 throw new ParsingException(parser.getTokenLocation(),

@@ -154,15 +154,15 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token.isValue()) {
-            if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) {
+            if (SuggestionBuilder.ANALYZER_FIELD.match(currentFieldName)) {
                 analyzer = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) {
+            } else if (SuggestionBuilder.FIELDNAME_FIELD.match(currentFieldName)) {
                 fieldname = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) {
+            } else if (SuggestionBuilder.SIZE_FIELD.match(currentFieldName)) {
                 sizeField = parser.intValue();
-            } else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) {
+            } else if (SuggestionBuilder.SHARDSIZE_FIELD.match(currentFieldName)) {
                 shardSize = parser.intValue();
-            } else if (parseFieldMatcher.match(currentFieldName, RANDOM_SUFFIX_FIELD)) {
+            } else if (RANDOM_SUFFIX_FIELD.match(currentFieldName)) {
                 suffix = parser.text();
             }
         } else {

@@ -40,7 +40,7 @@ public class MatrixStatsParser extends NumericValuesSourceParser {
     @Override
     protected boolean token(String aggregationName, String currentFieldName, XContentParser.Token token, XContentParser parser,
             ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
-        if (parseFieldMatcher.match(currentFieldName, MULTIVALUE_MODE_FIELD)) {
+        if (MULTIVALUE_MODE_FIELD.match(currentFieldName)) {
             if (token == XContentParser.Token.VALUE_STRING) {
                 otherOptions.put(MULTIVALUE_MODE_FIELD, parser.text());
                 return true;

@@ -93,11 +93,11 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
         if (token == XContentParser.Token.FIELD_NAME) {
             currentFieldName = parser.currentName();
         } else if (token == XContentParser.Token.VALUE_STRING) {
-            if (parseFieldMatcher.match(currentFieldName, CommonFields.FIELDS)) {
+            if (CommonFields.FIELDS.match(currentFieldName)) {
                 fields = Collections.singletonList(parser.text());
-            } else if (formattable && parseFieldMatcher.match(currentFieldName, CommonFields.FORMAT)) {
+            } else if (formattable && CommonFields.FORMAT.match(currentFieldName)) {
                 format = parser.text();
-            } else if (parseFieldMatcher.match(currentFieldName, CommonFields.VALUE_TYPE)) {
+            } else if (CommonFields.VALUE_TYPE.match(currentFieldName)) {
                 throw new ParsingException(parser.getTokenLocation(),
                     "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
                         "Multi-field aggregations do not support scripts.");

@@ -106,7 +106,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
             }
         } else if (token == XContentParser.Token.START_OBJECT) {
-            if (parseFieldMatcher.match(currentFieldName, CommonFields.MISSING)) {
+            if (CommonFields.MISSING.match(currentFieldName)) {
                 missingMap = new HashMap<>();
                 while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                     parseMissingAndAdd(aggregationName, currentFieldName, parser, missingMap);

@@ -125,7 +125,7 @@ public abstract class MultiValuesSourceParser<VS extends ValuesSource> implement
                throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
                        "Multi-field aggregations do not support scripts.");
-            } else if (parseFieldMatcher.match(currentFieldName, CommonFields.FIELDS)) {
+            } else if (CommonFields.FIELDS.match(currentFieldName)) {
                 fields = new ArrayList<>();
                 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                     if (token == XContentParser.Token.VALUE_STRING) {

@@ -87,13 +87,13 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler<Upda
         Map.Entry<String, Object> entry = itr.next();
         String parameterName = entry.getKey();
         Object parameterValue = entry.getValue();
-        if (parseFieldMatcher.match(parameterName, Script.LANG_PARSE_FIELD)) {
+        if (Script.LANG_PARSE_FIELD.match(parameterName)) {
             if (parameterValue instanceof String || parameterValue == null) {
                 lang = (String) parameterValue;
             } else {
                 throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
             }
-        } else if (parseFieldMatcher.match(parameterName, Script.PARAMS_PARSE_FIELD)) {
+        } else if (Script.PARAMS_PARSE_FIELD.match(parameterName)) {
             if (parameterValue instanceof Map || parameterValue == null) {
                 params = (Map<String, Object>) parameterValue;
             } else {