Merge branch 'master' into dots3

Ryan Ernst 2016-04-18 14:24:37 -07:00
commit 2739ee6c8f
350 changed files with 5530 additions and 5345 deletions

View File

@ -1,5 +1,5 @@
Elasticsearch
Copyright 2009-2015 Elasticsearch
Copyright 2009-2016 Elasticsearch
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).

View File

@ -222,7 +222,7 @@ h1. License
<pre>
This software is licensed under the Apache License, version 2 ("ALv2"), quoted below.
Copyright 2009-2015 Elasticsearch <https://www.elastic.co>
Copyright 2009-2016 Elasticsearch <https://www.elastic.co>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of

View File

@ -18,6 +18,8 @@
*/
import com.bmuschko.gradle.nexus.NexusPlugin
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.RepositoryBuilder
import org.gradle.plugins.ide.eclipse.model.SourceFolder
// common maven publishing configuration
@ -50,21 +52,37 @@ subprojects {
javadoc = true
tests = false
}
nexus {
String buildSnapshot = System.getProperty('build.snapshot', 'true')
if (buildSnapshot == 'false') {
Repository repo = new RepositoryBuilder().findGitDir(new File('.')).build()
String shortHash = repo.resolve('HEAD')?.name?.substring(0,7)
repositoryUrl = project.hasProperty('build.repository') ? project.property('build.repository') : "file://${System.getenv('HOME')}/elasticsearch-releases/${version}-${shortHash}/"
}
}
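The nexus block above derives the local release directory from the abbreviated HEAD commit hash, resolved through JGit (hence the two org.eclipse.jgit imports added at the top of the file). A minimal standalone sketch of that lookup in plain Java; the class name ShortHash and the workingDir parameter are illustrative and not part of the build script:

    import java.io.File;
    import java.io.IOException;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.lib.RepositoryBuilder;

    class ShortHash {
        // Resolve the 7-character abbreviation of HEAD, as the Groovy snippet above does.
        static String resolve(File workingDir) throws IOException {
            try (Repository repo = new RepositoryBuilder().findGitDir(workingDir).build()) {
                ObjectId head = repo.resolve("HEAD");     // null when HEAD cannot be resolved
                return head == null ? null : head.name().substring(0, 7);
            }
        }
    }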
// we have our own username/password prompts so that they only happen once
// TODO: add gpg signing prompts
// TODO: add gpg signing prompts, which is tricky, as the buildDeb/buildRpm tasks are executed before this code block
project.gradle.taskGraph.whenReady { taskGraph ->
if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) {
Console console = System.console()
if (project.hasProperty('nexusUsername') == false) {
String nexusUsername = console.readLine('\nNexus username: ')
// no need for username/password on local deploy
if (project.nexus.repositoryUrl.startsWith('file://')) {
project.rootProject.allprojects.each {
it.ext.nexusUsername = nexusUsername
it.ext.nexusUsername = 'foo'
it.ext.nexusPassword = 'bar'
}
}
if (project.hasProperty('nexusPassword') == false) {
String nexusPassword = new String(console.readPassword('\nNexus password: '))
project.rootProject.allprojects.each {
it.ext.nexusPassword = nexusPassword
} else {
if (project.hasProperty('nexusUsername') == false) {
String nexusUsername = console.readLine('\nNexus username: ')
project.rootProject.allprojects.each {
it.ext.nexusUsername = nexusUsername
}
}
if (project.hasProperty('nexusPassword') == false) {
String nexusPassword = new String(console.readPassword('\nNexus password: '))
project.rootProject.allprojects.each {
it.ext.nexusPassword = nexusPassword
}
}
}
}
@ -181,6 +199,10 @@ allprojects {
outputDir = file('build-idea/classes/main')
testOutputDir = file('build-idea/classes/test')
// also ignore other possible build dirs
excludeDirs += file('build')
excludeDirs += file('build-eclipse')
iml {
// fix so that Gradle idea plugin properly generates support for resource folders
// see also https://issues.gradle.org/browse/GRADLE-2975

View File

@ -404,10 +404,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]TransportNodesListGatewayMetaState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]HttpTransportSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]HttpRequestHandler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpChannel.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpServerTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
@ -506,7 +502,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryShardContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SimpleQueryParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]InnerHitsQueryParserHelper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryParsers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MatchQuery.java" checks="LineLength" />
@ -808,12 +803,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]PlainTransportFuture.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]RequestHandlerRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]Transport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportChannelResponseHandler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQueryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]postingshighlight[/\\]CustomPostingsHighlighterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ESExceptionTests.java" checks="LineLength" />
@ -876,9 +865,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RecoveryWithUnsupportedIndicesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RestoreBackwardsCompatIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]FailAndRetryMockTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClientIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClientRetryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterInfoServiceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterModuleTests.java" checks="LineLength" />
@ -977,7 +963,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]rounding[/\\]TimeZoneRoundingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]ScopedSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]transport[/\\]BoundTransportAddressTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]DistanceUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]FuzzinessTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArraysTests.java" checks="LineLength" />
@ -1023,8 +1008,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReusePeerRecoverySharedTest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpServerPipeliningTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningDisabledIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningEnabledIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexWithShadowReplicasIT.java" checks="LineLength" />
@ -1305,15 +1288,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPoolSerializationTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]UpdateThreadPoolSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]timestamp[/\\]SimpleTimestampIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]AbstractSimpleTransportTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]ActionNamesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]ContextAndHeaderTransportIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]NettySizeHeaderFrameDecoderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]local[/\\]SimpleLocalTransportTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyScheduledPingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportMultiPortIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportMultiPortTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ttl[/\\]SimpleTTLIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
@ -1503,9 +1477,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]support[/\\]FileUtils.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSDirectoryService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSIndexStore.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]AssertingLocalTransport.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]CapturingTransport.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]MockTransportService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]FileUtilsTests.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]JsonPathTests.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]RestTestParserTests.java" checks="LineLength" />
@ -1516,7 +1487,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingsModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]admin[/\\]indices[/\\]settings[/\\]RestGetSettingsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortBuilderIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]CorsNotSetIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingsModuleTests.java" checks="LineLength" />

View File

@ -219,7 +219,7 @@ h1. License
<pre>
This software is licensed under the Apache License, version 2 ("ALv2"), quoted below.
Copyright 2009-2015 Elasticsearch <https://www.elastic.co>
Copyright 2009-2016 Elasticsearch <https://www.elastic.co>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of

View File

@ -530,42 +530,45 @@ public class MapperQueryParser extends QueryParser {
}
}
if (tlist.size() == 0) {
return null;
}
if (tlist.size() == 1 && tlist.get(0).size() == 1) {
return super.getPrefixQuery(field, tlist.get(0).get(0));
} else {
// build a boolean query with prefix on the last position only.
List<BooleanClause> clauses = new ArrayList<>();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size()-1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = getPrefixQuery(field, plist.get(0));
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)));
}
} else if (isLastPos == false) {
// build a synonym query for terms in the same position.
Term[] terms = new Term[plist.size()];
for (int i = 0; i < plist.size(); i++) {
terms[i] = new Term(field, plist.get(i));
}
posQuery = new SynonymQuery(terms);
} else {
List<BooleanClause> innerClauses = new ArrayList<>();
for (String token : plist) {
innerClauses.add(new BooleanClause(getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQueryCoordDisabled(innerClauses);
}
clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses);
}
// build a boolean query with prefix on the last position only.
List<BooleanClause> clauses = new ArrayList<>();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size() - 1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = super.getPrefixQuery(field, plist.get(0));
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)));
}
} else if (isLastPos == false) {
// build a synonym query for terms in the same position.
Term[] terms = new Term[plist.size()];
for (int i = 0; i < plist.size(); i++) {
terms[i] = new Term(field, plist.get(i));
}
posQuery = new SynonymQuery(terms);
} else {
List<BooleanClause> innerClauses = new ArrayList<>();
for (String token : plist) {
innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQueryCoordDisabled(innerClauses);
}
clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses);
}
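The comments above describe the shape of the rewritten prefix handling: each non-final position contributes either a plain term query or a SynonymQuery over the terms sharing that position, and only the last position becomes a prefix query. A hand-built sketch of that shape in plain Lucene (field name and terms are invented for illustration; Occur.MUST corresponds to the AND default-operator branch):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.PrefixQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.SynonymQuery;

    class LastPositionPrefixExample {
        static Query build() {
            BooleanQuery.Builder bq = new BooleanQuery.Builder();
            // position 0 produced two terms in the same position, so they collapse into one SynonymQuery
            bq.add(new BooleanClause(new SynonymQuery(new Term("body", "quick"), new Term("body", "fast")),
                    BooleanClause.Occur.MUST));
            // last position only: a prefix query
            bq.add(new BooleanClause(new PrefixQuery(new Term("body", "brow")), BooleanClause.Occur.MUST));
            return bq.build();
        }
    }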
@Override

View File

@ -24,14 +24,14 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
@ -40,7 +40,8 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
*/
public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAllocationExplainRequest> {
private static ObjectParser<ClusterAllocationExplainRequest, Void> PARSER = new ObjectParser("cluster/allocation/explain");
private static ObjectParser<ClusterAllocationExplainRequest, ParseFieldMatcherSupplier> PARSER = new ObjectParser(
"cluster/allocation/explain");
static {
PARSER.declareString(ClusterAllocationExplainRequest::setIndex, new ParseField("index"));
PARSER.declareInt(ClusterAllocationExplainRequest::setShard, new ParseField("shard"));
@ -148,7 +149,7 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl
}
public static ClusterAllocationExplainRequest parse(XContentParser parser) throws IOException {
ClusterAllocationExplainRequest req = PARSER.parse(parser, new ClusterAllocationExplainRequest());
ClusterAllocationExplainRequest req = PARSER.parse(parser, new ClusterAllocationExplainRequest(), () -> ParseFieldMatcher.STRICT);
Exception e = req.validate();
if (e != null) {
throw new ElasticsearchParseException("'index', 'shard', and 'primary' must be specified in allocation explain request", e);

View File

@ -25,17 +25,14 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandRegistry;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
/**
* Request to submit cluster reroute allocation commands
@ -105,7 +102,6 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
public ClusterRerouteRequest source(BytesReference source, AllocationCommandRegistry registry, ParseFieldMatcher parseFieldMatcher)
throws Exception {
try (XContentParser parser = XContentHelper.createParser(source)) {
parser.setParseFieldMatcher(parseFieldMatcher);
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -113,7 +109,7 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if ("commands".equals(currentFieldName)) {
this.commands = AllocationCommands.fromXContent(parser, registry);
this.commands = AllocationCommands.fromXContent(parser, parseFieldMatcher, registry);
} else {
throw new ElasticsearchParseException("failed to parse reroute request, got start array with wrong field name [{}]", currentFieldName);
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.create;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
@ -49,9 +48,9 @@ import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}.
@ -305,8 +304,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the aliases that will be associated with the index when it gets created
*/
public CreateIndexRequest aliases(BytesReference source) {
try {
XContentParser parser = XContentHelper.createParser(source);
try (XContentParser parser = XContentHelper.createParser(source)) {
//move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
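This hunk is one instance of a cleanup repeated across the commit: XContentParser instances obtained from XContentHelper or XContentFactory are now opened in a try-with-resources header so they are closed on every exit path, including exceptions. The idiom in isolation, with java.io.StringReader standing in for the parser (which is likewise Closeable):

    import java.io.IOException;
    import java.io.StringReader;

    class TryWithResourcesIdiom {
        static int firstChar(String source) throws IOException {
            // Before: the resource was created inside a bare try block and could leak on failure paths.
            // After: declaring it in the try header guarantees close() is called.
            try (StringReader reader = new StringReader(source)) {
                return reader.read();
            }
        }
    }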

View File

@ -47,9 +47,9 @@ import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index template.
@ -71,7 +71,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
private Map<String, String> mappings = new HashMap<>();
private final Set<Alias> aliases = new HashSet<>();
private Map<String, IndexMetaData.Custom> customs = new HashMap<>();
public PutIndexTemplateRequest() {
@ -356,7 +356,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
public Map<String, IndexMetaData.Custom> customs() {
return this.customs;
}
public Set<Alias> aliases() {
return this.aliases;
}
@ -393,8 +393,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* Sets the aliases that will be associated with the index when it gets created
*/
public PutIndexTemplateRequest aliases(BytesReference source) {
try {
XContentParser parser = XContentHelper.createParser(source);
try (XContentParser parser = XContentHelper.createParser(source)) {
//move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {

View File

@ -52,6 +52,7 @@ import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -107,7 +108,12 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
}
@Override
protected void doExecute(final BulkRequest bulkRequest, final ActionListener<BulkResponse> listener) {
protected final void doExecute(final BulkRequest bulkRequest, final ActionListener<BulkResponse> listener) {
throw new UnsupportedOperationException("task parameter is required for this operation");
}
@Override
protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener<BulkResponse> listener) {
final long startTime = relativeTime();
final AtomicArray<BulkItemResponse> responses = new AtomicArray<>(bulkRequest.requests.size());
@ -143,7 +149,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
public void onResponse(CreateIndexResponse result) {
if (counter.decrementAndGet() == 0) {
try {
executeBulk(bulkRequest, startTime, listener, responses);
executeBulk(task, bulkRequest, startTime, listener, responses);
} catch (Throwable t) {
listener.onFailure(t);
}
@ -163,7 +169,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
}
if (counter.decrementAndGet() == 0) {
try {
executeBulk(bulkRequest, startTime, listener, responses);
executeBulk(task, bulkRequest, startTime, listener, responses);
} catch (Throwable t) {
listener.onFailure(t);
}
@ -172,12 +178,12 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
});
} else {
if (counter.decrementAndGet() == 0) {
executeBulk(bulkRequest, startTime, listener, responses);
executeBulk(task, bulkRequest, startTime, listener, responses);
}
}
}
} else {
executeBulk(bulkRequest, startTime, listener, responses);
executeBulk(task, bulkRequest, startTime, listener, responses);
}
}
@ -222,14 +228,14 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
*/
public void executeBulk(final BulkRequest bulkRequest, final ActionListener<BulkResponse> listener) {
final long startTimeNanos = relativeTime();
executeBulk(bulkRequest, startTimeNanos, listener, new AtomicArray<>(bulkRequest.requests.size()));
executeBulk(null, bulkRequest, startTimeNanos, listener, new AtomicArray<>(bulkRequest.requests.size()));
}
private long buildTookInMillis(long startTimeNanos) {
return TimeUnit.NANOSECONDS.toMillis(relativeTime() - startTimeNanos);
}
void executeBulk(final BulkRequest bulkRequest, final long startTimeNanos, final ActionListener<BulkResponse> listener, final AtomicArray<BulkItemResponse> responses ) {
void executeBulk(Task task, final BulkRequest bulkRequest, final long startTimeNanos, final ActionListener<BulkResponse> listener, final AtomicArray<BulkItemResponse> responses ) {
final ClusterState clusterState = clusterService.state();
// TODO use timeout to wait here if its blocked...
clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE);
@ -333,12 +339,16 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
}
final AtomicInteger counter = new AtomicInteger(requestsByShard.size());
String nodeId = clusterService.localNode().getId();
for (Map.Entry<ShardId, List<BulkItemRequest>> entry : requestsByShard.entrySet()) {
final ShardId shardId = entry.getKey();
final List<BulkItemRequest> requests = entry.getValue();
BulkShardRequest bulkShardRequest = new BulkShardRequest(bulkRequest, shardId, bulkRequest.refresh(), requests.toArray(new BulkItemRequest[requests.size()]));
bulkShardRequest.consistencyLevel(bulkRequest.consistencyLevel());
bulkShardRequest.timeout(bulkRequest.timeout());
if (task != null) {
bulkShardRequest.setParentTask(nodeId, task.getId());
}
shardBulkAction.execute(bulkShardRequest, new ActionListener<BulkShardResponse>() {
@Override
public void onResponse(BulkShardResponse bulkShardResponse) {

View File

@ -196,14 +196,14 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
PercolatorQueryBuilder percolatorQueryBuilder = new PercolatorQueryBuilder(percolateRequest.documentType(), documentSource);
if (querySource != null) {
QueryParseContext queryParseContext = new QueryParseContext(queryRegistry);
queryParseContext.reset(XContentHelper.createParser(querySource));
queryParseContext.parseFieldMatcher(parseFieldMatcher);
QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must(queryBuilder);
boolQueryBuilder.filter(percolatorQueryBuilder);
searchSource.field("query", boolQueryBuilder);
try (XContentParser parser = XContentHelper.createParser(querySource)) {
QueryParseContext queryParseContext = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder();
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
boolQueryBuilder.must(queryBuilder);
boolQueryBuilder.filter(percolatorQueryBuilder);
searchSource.field("query", boolQueryBuilder);
}
} else {
// wrapping in a constant score query with boost 0 for bwc reason.
// percolator api didn't emit scores before and never included scores
@ -215,10 +215,8 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
searchSource.flush();
BytesReference source = searchSource.bytes();
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
QueryParseContext context = new QueryParseContext(queryRegistry);
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(source)) {
context.reset(parser);
context.parseFieldMatcher(parseFieldMatcher);
QueryParseContext context = new QueryParseContext(queryRegistry, parser, parseFieldMatcher);
searchSourceBuilder.parseXContent(context, aggParsers, null);
searchRequest.source(searchSourceBuilder);
return searchRequest;

View File

@ -20,6 +20,8 @@
package org.elasticsearch.bootstrap;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
@ -30,6 +32,10 @@ import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.process.ProcessProbe;
import org.elasticsearch.node.Node;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -42,7 +48,6 @@ import java.util.stream.Collectors;
* and all production limit checks must pass. This should be extended as we go to settings like:
* - discovery.zen.ping.unicast.hosts is set if we use zen disco
* - ensure we can write in all data directories
* - fail if vm.max_map_count is under a certain limit (not sure if this works cross platform)
* - fail if the default cluster.name is used, if this is setup on network a real clustername should be used?
*/
final class BootstrapCheck {
@ -121,6 +126,9 @@ final class BootstrapCheck {
checks.add(new MaxSizeVirtualMemoryCheck());
}
checks.add(new MinMasterNodesCheck(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.exists(settings)));
if (Constants.LINUX) {
checks.add(new MaxMapCountCheck());
}
return Collections.unmodifiableList(checks);
}
@ -327,4 +335,67 @@ final class BootstrapCheck {
}
static class MaxMapCountCheck implements Check {
private final long limit = 1 << 18;
@Override
public boolean check() {
return getMaxMapCount() != -1 && getMaxMapCount() < limit;
}
@Override
public String errorMessage() {
return String.format(
Locale.ROOT,
"max virtual memory areas vm.max_map_count [%d] likely too low, increase to at least [%d]",
getMaxMapCount(),
limit);
}
// visible for testing
long getMaxMapCount() {
return getMaxMapCount(Loggers.getLogger(BootstrapCheck.class));
}
// visible for testing
long getMaxMapCount(ESLogger logger) {
final Path path = getProcSysVmMaxMapCountPath();
try (final BufferedReader bufferedReader = getBufferedReader(path)) {
final String rawProcSysVmMaxMapCount = readProcSysVmMaxMapCount(bufferedReader);
if (rawProcSysVmMaxMapCount != null) {
try {
return parseProcSysVmMaxMapCount(rawProcSysVmMaxMapCount);
} catch (final NumberFormatException e) {
logger.warn("unable to parse vm.max_map_count [{}]", e, rawProcSysVmMaxMapCount);
}
}
} catch (final IOException e) {
logger.warn("I/O exception while trying to read [{}]", e, path);
}
return -1;
}
@SuppressForbidden(reason = "access /proc/sys/vm/max_map_count")
private Path getProcSysVmMaxMapCountPath() {
return PathUtils.get("/proc/sys/vm/max_map_count");
}
// visible for testing
BufferedReader getBufferedReader(final Path path) throws IOException {
return Files.newBufferedReader(path);
}
// visible for testing
String readProcSysVmMaxMapCount(final BufferedReader bufferedReader) throws IOException {
return bufferedReader.readLine();
}
// visible for testing
long parseProcSysVmMaxMapCount(final String procSysVmMaxMapCount) throws NumberFormatException {
return Long.parseLong(procSysVmMaxMapCount);
}
}
}
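The threshold in MaxMapCountCheck is written as a bit shift; spelled out, 1 << 18 is 262144, so the check trips whenever a readable /proc/sys/vm/max_map_count reports less than that. A tiny worked example (the reported value 65530 is a hypothetical sample, not taken from this commit):

    class MaxMapCountMath {
        public static void main(String[] args) {
            long limit = 1 << 18;                       // 262144, the limit used by MaxMapCountCheck
            long reported = 65530;                      // hypothetical sysctl reading
            boolean fails = reported != -1 && reported < limit;
            System.out.println(limit + " -> " + fails); // prints "262144 -> true"
        }
    }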

View File

@ -22,6 +22,7 @@ package org.elasticsearch.cluster;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -440,8 +441,10 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
builder.startObject("mappings");
for (ObjectObjectCursor<String, CompressedXContent> cursor1 : templateMetaData.mappings()) {
byte[] mappingSource = cursor1.value.uncompressed();
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
Map<String, Object> mapping = parser.map();
Map<String, Object> mapping;
try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
mapping = parser.map();
}
if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(cursor1.key);
@ -470,8 +473,10 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
builder.startObject("mappings");
for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
byte[] mappingSource = cursor.value.source().uncompressed();
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
Map<String, Object> mapping = parser.map();
Map<String, Object> mapping;
try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
mapping = parser.map();
}
if (mapping.size() == 1 && mapping.containsKey(cursor.key)) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(cursor.key);

View File

@ -290,10 +290,10 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
builder.field("filter", aliasMetaData.filter.compressed());
} else {
byte[] data = aliasMetaData.filter().uncompressed();
XContentParser parser = XContentFactory.xContent(data).createParser(data);
Map<String, Object> filter = parser.mapOrdered();
parser.close();
builder.field("filter", filter);
try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
Map<String, Object> filter = parser.mapOrdered();
builder.field("filter", filter);
}
}
}
if (aliasMetaData.indexRouting() != null) {

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
@ -119,8 +118,7 @@ public class AliasValidator extends AbstractComponent {
*/
public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) {
assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) {
validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
@ -134,8 +132,7 @@ public class AliasValidator extends AbstractComponent {
*/
public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) {
assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) {
validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
@ -144,12 +141,12 @@ public class AliasValidator extends AbstractComponent {
private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
try {
queryShardContext.reset(parser);
QueryParseContext queryParseContext = queryShardContext.parseContext();
queryShardContext.reset();
QueryParseContext queryParseContext = queryShardContext.newParseContext(parser);
QueryBuilder<?> queryBuilder = QueryBuilder.rewriteQuery(queryParseContext.parseInnerQueryBuilder(), queryShardContext);
queryBuilder.toFilter(queryShardContext);
} finally {
queryShardContext.reset(null);
queryShardContext.reset();
parser.close();
}
}

View File

@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
@ -927,10 +928,10 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
builder.value(cursor.value.source().compressed());
} else {
byte[] data = cursor.value.source().uncompressed();
XContentParser parser = XContentFactory.xContent(data).createParser(data);
Map<String, Object> mapping = parser.mapOrdered();
parser.close();
builder.map(mapping);
try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
Map<String, Object> mapping = parser.mapOrdered();
builder.map(mapping);
}
}
}
builder.endArray();

View File

@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
@ -329,8 +330,10 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
builder.startObject("mappings");
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
byte[] mappingSource = cursor.value.uncompressed();
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
Map<String, Object> mapping = parser.map();
Map<String, Object> mapping;
try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {;
mapping = parser.map();
}
if (mapping.size() == 1 && mapping.containsKey(cursor.key)) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(cursor.key);

View File

@ -127,11 +127,10 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) {
try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null)) {
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
MappingMetaData mappingMetaData = cursor.value;
mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false);
}
MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null);
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
MappingMetaData mappingMetaData = cursor.value;
mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false);
}
}
} catch (Exception ex) {

View File

@ -21,6 +21,8 @@ package org.elasticsearch.cluster.routing;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -45,7 +47,8 @@ public class AllocationId implements ToXContent {
private static final String ID_KEY = "id";
private static final String RELOCATION_ID_KEY = "relocation_id";
private static final ObjectParser<AllocationId.Builder, Void> ALLOCATION_ID_PARSER = new ObjectParser<>("allocationId");
private static final ObjectParser<AllocationId.Builder, ParseFieldMatcherSupplier> ALLOCATION_ID_PARSER = new ObjectParser<>(
"allocationId");
static {
ALLOCATION_ID_PARSER.declareString(AllocationId.Builder::setId, new ParseField(ID_KEY));
@ -198,6 +201,6 @@ public class AllocationId implements ToXContent {
}
public static AllocationId fromXContent(XContentParser parser) throws IOException {
return ALLOCATION_ID_PARSER.parse(parser, new AllocationId.Builder()).build();
return ALLOCATION_ID_PARSER.parse(parser, new AllocationId.Builder(), () -> ParseFieldMatcher.STRICT).build();
}
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -47,8 +48,8 @@ public abstract class AbstractAllocateAllocationCommand implements AllocationCom
private static final String SHARD_FIELD = "shard";
private static final String NODE_FIELD = "node";
protected static <T extends Builder<?>> ObjectParser<T, Void> createAllocateParser(String command) {
ObjectParser<T, Void> parser = new ObjectParser<>(command);
protected static <T extends Builder<?>> ObjectParser<T, ParseFieldMatcherSupplier> createAllocateParser(String command) {
ObjectParser<T, ParseFieldMatcherSupplier> parser = new ObjectParser<>(command);
parser.declareString(Builder::setIndex, new ParseField(INDEX_FIELD));
parser.declareInt(Builder::setShard, new ParseField(SHARD_FIELD));
parser.declareString(Builder::setNode, new ParseField(NODE_FIELD));

View File

@ -28,6 +28,8 @@ import org.elasticsearch.cluster.routing.allocation.RerouteExplanation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
@ -45,7 +47,8 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
public static final String NAME = "allocate_empty_primary";
public static final ParseField COMMAND_NAME_FIELD = new ParseField(NAME);
private static final ObjectParser<Builder, Void> EMPTY_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME);
private static final ObjectParser<Builder, ParseFieldMatcherSupplier> EMPTY_PRIMARY_PARSER = BasePrimaryAllocationCommand
.createAllocatePrimaryParser(NAME);
/**
* Creates a new {@link AllocateEmptyPrimaryAllocationCommand}
@ -78,7 +81,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
@Override
public Builder parse(XContentParser parser) throws IOException {
return EMPTY_PRIMARY_PARSER.parse(parser, this);
return EMPTY_PRIMARY_PARSER.parse(parser, this, () -> ParseFieldMatcher.STRICT);
}
@Override

View File

@ -28,6 +28,8 @@ import org.elasticsearch.cluster.routing.allocation.RerouteExplanation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
@ -44,7 +46,8 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
public static final String NAME = "allocate_replica";
public static final ParseField COMMAND_NAME_FIELD = new ParseField(NAME);
private static final ObjectParser<AllocateReplicaAllocationCommand.Builder, Void> REPLICA_PARSER = createAllocateParser(NAME);
private static final ObjectParser<AllocateReplicaAllocationCommand.Builder, ParseFieldMatcherSupplier> REPLICA_PARSER =
createAllocateParser(NAME);
/**
* Creates a new {@link AllocateReplicaAllocationCommand}
@ -77,7 +80,7 @@ public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocation
@Override
public Builder parse(XContentParser parser) throws IOException {
return REPLICA_PARSER.parse(parser, this);
return REPLICA_PARSER.parse(parser, this, () -> ParseFieldMatcher.STRICT);
}
@Override

View File

@ -28,6 +28,8 @@ import org.elasticsearch.cluster.routing.allocation.RerouteExplanation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
@ -44,7 +46,8 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation
public static final String NAME = "allocate_stale_primary";
public static final ParseField COMMAND_NAME_FIELD = new ParseField(NAME);
private static final ObjectParser<Builder, Void> STALE_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME);
private static final ObjectParser<Builder, ParseFieldMatcherSupplier> STALE_PRIMARY_PARSER = BasePrimaryAllocationCommand
.createAllocatePrimaryParser(NAME);
/**
* Creates a new {@link AllocateStalePrimaryAllocationCommand}
@ -78,7 +81,7 @@ public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocation
@Override
public Builder parse(XContentParser parser) throws IOException {
return STALE_PRIMARY_PARSER.parse(parser, this);
return STALE_PRIMARY_PARSER.parse(parser, this, () -> ParseFieldMatcher.STRICT);
}
@Override

View File

@ -22,6 +22,7 @@ package org.elasticsearch.cluster.routing.allocation.command;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
@ -128,7 +129,8 @@ public class AllocationCommands {
* @return {@link AllocationCommands} read
* @throws IOException if something bad happens while reading the stream
*/
public static AllocationCommands fromXContent(XContentParser parser, AllocationCommandRegistry registry) throws IOException {
public static AllocationCommands fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher,
AllocationCommandRegistry registry) throws IOException {
AllocationCommands commands = new AllocationCommands();
XContentParser.Token token = parser.currentToken();
@ -157,7 +159,7 @@ public class AllocationCommands {
token = parser.nextToken();
String commandName = parser.currentName();
token = parser.nextToken();
commands.add(registry.lookup(commandName, parser).fromXContent(parser));
commands.add(registry.lookup(commandName, parser, parseFieldMatcher).fromXContent(parser));
// move to the end object one
if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
throw new ElasticsearchParseException("allocation command is malformed, done parsing a command, but didn't get END_OBJECT, got [{}] instead", token);

View File

@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.command;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -34,8 +35,8 @@ public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAlloc
private static final String ACCEPT_DATA_LOSS_FIELD = "accept_data_loss";
protected static <T extends Builder<?>> ObjectParser<T, Void> createAllocatePrimaryParser(String command) {
ObjectParser<T, Void> parser = AbstractAllocateAllocationCommand.createAllocateParser(command);
protected static <T extends Builder<?>> ObjectParser<T, ParseFieldMatcherSupplier> createAllocatePrimaryParser(String command) {
ObjectParser<T, ParseFieldMatcherSupplier> parser = AbstractAllocateAllocationCommand.createAllocateParser(command);
parser.declareBoolean(Builder::setAcceptDataLoss, new ParseField(ACCEPT_DATA_LOSS_FIELD));
return parser;
}

View File

@ -16,32 +16,21 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.global;
package org.elasticsearch.common;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.Aggregator;
import java.io.IOException;
import org.elasticsearch.index.query.QueryShardContext;
/**
*
* This interface should be implemented by classes like {@link QueryParseContext} or {@link QueryShardContext} that
* are able to carry a {@link ParseFieldMatcher}.
*/
public class GlobalParser implements Aggregator.Parser {
@Override
public String type() {
return InternalGlobal.TYPE.name();
}
@Override
public GlobalAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
context.parser().nextToken();
return new GlobalAggregatorBuilder(aggregationName);
}
@Override
public GlobalAggregatorBuilder getFactoryPrototypes() {
return GlobalAggregatorBuilder.PROTOTYPE;
}
@FunctionalInterface
public interface ParseFieldMatcherSupplier {
/**
* @return the parseFieldMatcher
*/
ParseFieldMatcher getParseFieldMatcher();
}
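Because the new interface has a single abstract method, the call sites updated elsewhere in this commit satisfy it with a lambda such as () -> ParseFieldMatcher.STRICT. A minimal illustration (the wrapper class and field names are invented; ParseFieldMatcher and ParseFieldMatcherSupplier are the Elasticsearch types shown above):

    import org.elasticsearch.common.ParseFieldMatcher;
    import org.elasticsearch.common.ParseFieldMatcherSupplier;

    class StrictSupplierExample {
        // Implemented inline, exactly as the updated PARSER.parse(..., () -> ParseFieldMatcher.STRICT) calls do.
        static final ParseFieldMatcherSupplier STRICT_SUPPLIER = () -> ParseFieldMatcher.STRICT;

        static ParseFieldMatcher demo() {
            return STRICT_SUPPLIER.getParseFieldMatcher();  // yields ParseFieldMatcher.STRICT
        }
    }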

View File

@ -53,7 +53,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
public GeometryCollectionBuilder(StreamInput in) throws IOException {
int shapes = in.readVInt();
for (int i = 0; i < shapes; i++) {
shape(in.readShape());
shape(in.readNamedWriteable(ShapeBuilder.class));
}
}
@ -61,7 +61,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(shapes.size());
for (ShapeBuilder shape : shapes) {
out.writeShape(shape);
out.writeNamedWriteable(shape);
}
}
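The serialization change above is mechanical: the type-specific readShape()/writeShape() helpers give way to the generic NamedWriteable calls, with the category class driving the registry lookup. Isolated into a pair of helpers (class and method names invented; StreamInput, StreamOutput, and ShapeBuilder are the types used in the hunk):

    import java.io.IOException;
    import org.elasticsearch.common.geo.builders.ShapeBuilder;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    class ShapeRoundTrip {
        static ShapeBuilder read(StreamInput in) throws IOException {
            return in.readNamedWriteable(ShapeBuilder.class);  // replaces in.readShape()
        }

        static void write(StreamOutput out, ShapeBuilder shape) throws IOException {
            out.writeNamedWriteable(shape);                    // replaces out.writeShape(shape)
        }
    }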

View File

@ -34,7 +34,7 @@ public class NamedWriteableAwareStreamInput extends FilterStreamInput {
}
@Override
<C extends NamedWriteable<?>> C readNamedWriteable(Class<C> categoryClass) throws IOException {
public <C extends NamedWriteable<?>> C readNamedWriteable(Class<C> categoryClass) throws IOException {
String name = readString();
Writeable.Reader<? extends C> reader = namedWriteableRegistry.getReader(categoryClass, name);
C c = reader.read(this);

View File

@ -34,14 +34,10 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
@ -725,100 +721,89 @@ public abstract class StreamInput extends InputStream {
* Default implementation throws {@link UnsupportedOperationException} as StreamInput doesn't hold a registry.
* Use {@link FilterInputStream} instead which wraps a stream and supports a {@link NamedWriteableRegistry} too.
*/
<C extends NamedWriteable<?>> C readNamedWriteable(@SuppressWarnings("unused") Class<C> categoryClass) throws IOException {
@Nullable
public <C extends NamedWriteable<?>> C readNamedWriteable(@SuppressWarnings("unused") Class<C> categoryClass) throws IOException {
throw new UnsupportedOperationException("can't read named writeable from StreamInput");
}
/**
* Reads a {@link AggregatorBuilder} from the current stream
* Reads an optional {@link NamedWriteable}.
*/
public AggregatorBuilder<?> readAggregatorBuilder() throws IOException {
return readNamedWriteable(AggregatorBuilder.class);
}
/**
* Reads a {@link PipelineAggregatorBuilder} from the current stream
*/
public PipelineAggregatorBuilder<?> readPipelineAggregatorBuilder() throws IOException {
return readNamedWriteable(PipelineAggregatorBuilder.class);
}
/**
* Reads a {@link QueryBuilder} from the current stream
*/
public QueryBuilder<?> readQuery() throws IOException {
return readNamedWriteable(QueryBuilder.class);
}
/**
* Reads an optional {@link QueryBuilder}.
*/
public QueryBuilder<?> readOptionalQuery() throws IOException {
public <C extends NamedWriteable<?>> C readOptionalNamedWriteable(Class<C> categoryClass) throws IOException {
if (readBoolean()) {
return readNamedWriteable(QueryBuilder.class);
return readNamedWriteable(categoryClass);
}
return null;
}
/**
* Reads a {@link ShapeBuilder} from the current stream
* Reads a {@link QueryBuilder} from the current stream
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link QueryBuilder}.
*/
public ShapeBuilder readShape() throws IOException {
return readNamedWriteable(ShapeBuilder.class);
}
/**
* Reads a {@link RescoreBuilder} from the current stream
*/
public RescoreBuilder<?> readRescorer() throws IOException {
return readNamedWriteable(RescoreBuilder.class);
@Deprecated
public QueryBuilder<?> readQuery() throws IOException {
return readNamedWriteable(QueryBuilder.class);
}
/**
* Reads a {@link SuggestionBuilder} from the current stream
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link SuggestionBuilder}.
*/
@Deprecated
public SuggestionBuilder<?> readSuggestion() throws IOException {
return readNamedWriteable(SuggestionBuilder.class);
}
/**
* Reads a {@link SortBuilder} from the current stream
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link SortBuilder}.
*/
@Deprecated
public SortBuilder<?> readSortBuilder() throws IOException {
return readNamedWriteable(SortBuilder.class);
}
/**
* Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link ScoreFunctionBuilder}.
*/
@Deprecated
public ScoreFunctionBuilder<?> readScoreFunction() throws IOException {
return readNamedWriteable(ScoreFunctionBuilder.class);
}
/**
* Reads a {@link SmoothingModel} from the current stream
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link SmoothingModel}.
*/
@Deprecated
public SmoothingModel readPhraseSuggestionSmoothingModel() throws IOException {
return readNamedWriteable(SmoothingModel.class);
}
/**
* Reads a {@link Task.Status} from the current stream.
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link Task.Status}.
*/
@Deprecated
public Task.Status readTaskStatus() throws IOException {
return readNamedWriteable(Task.Status.class);
}
/**
* Reads a {@link DocValueFormat} from the current stream.
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link DocValueFormat}.
*/
@Deprecated
public DocValueFormat readValueFormat() throws IOException {
return readNamedWriteable(DocValueFormat.class);
}
/**
* Reads an {@link AllocationCommand} from the stream.
* @deprecated prefer {@link #readNamedWriteable(Class)} passing {@link AllocationCommand}.
*/
@Deprecated
public AllocationCommand readAllocationCommand() throws IOException {
return readNamedWriteable(AllocationCommand.class);
}
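
The new readOptionalNamedWriteable pairs with writeOptionalNamedWriteable further down and follows the usual optional-value convention on the wire: a single boolean flag says whether a value follows, so an absent value costs one byte. A minimal sketch of that presence-flag convention using plain java.io streams — OptionalWireSketch and its methods are hypothetical illustration names, not the real API:

import java.io.*;
import java.util.Optional;

public class OptionalWireSketch {

    static void writeOptionalString(DataOutput out, String value) throws IOException {
        if (value == null) {
            out.writeBoolean(false);          // nothing follows
        } else {
            out.writeBoolean(true);           // a value follows
            out.writeUTF(value);
        }
    }

    static Optional<String> readOptionalString(DataInput in) throws IOException {
        return in.readBoolean() ? Optional.of(in.readUTF()) : Optional.empty();
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        writeOptionalString(out, null);
        writeOptionalString(out, "hello");

        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readOptionalString(in)); // Optional.empty
        System.out.println(readOptionalString(in)); // Optional[hello]
    }
}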

View File

@ -33,14 +33,10 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
@ -687,68 +683,55 @@ public abstract class StreamOutput extends OutputStream {
/**
* Writes a {@link NamedWriteable} to the current stream, by first writing its name and then the object itself
*/
void writeNamedWriteable(NamedWriteable<?> namedWriteable) throws IOException {
public void writeNamedWriteable(NamedWriteable<?> namedWriteable) throws IOException {
writeString(namedWriteable.getWriteableName());
namedWriteable.writeTo(this);
}
/**
* Writes a {@link AggregatorBuilder} to the current stream
* Write an optional {@link NamedWriteable} to the stream.
*/
public void writeAggregatorBuilder(AggregatorBuilder<?> builder) throws IOException {
writeNamedWriteable(builder);
}
/**
* Writes a {@link PipelineAggregatorBuilder} to the current stream
*/
public void writePipelineAggregatorBuilder(PipelineAggregatorBuilder<?> builder) throws IOException {
writeNamedWriteable(builder);
public void writeOptionalNamedWriteable(@Nullable NamedWriteable<?> namedWriteable) throws IOException {
if (namedWriteable == null) {
writeBoolean(false);
} else {
writeBoolean(true);
writeNamedWriteable(namedWriteable);
}
}
/**
* Writes a {@link QueryBuilder} to the current stream
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeQuery(QueryBuilder<?> queryBuilder) throws IOException {
writeNamedWriteable(queryBuilder);
}
/**
* Write an optional {@link QueryBuilder} to the stream.
*/
public void writeOptionalQuery(@Nullable QueryBuilder<?> queryBuilder) throws IOException {
if (queryBuilder == null) {
writeBoolean(false);
} else {
writeBoolean(true);
writeQuery(queryBuilder);
}
}
/**
* Writes a {@link ShapeBuilder} to the current stream
*/
public void writeShape(ShapeBuilder shapeBuilder) throws IOException {
writeNamedWriteable(shapeBuilder);
}
/**
* Writes a {@link ScoreFunctionBuilder} to the current stream
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeScoreFunction(ScoreFunctionBuilder<?> scoreFunctionBuilder) throws IOException {
writeNamedWriteable(scoreFunctionBuilder);
}
/**
* Writes the given {@link SmoothingModel} to the stream
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writePhraseSuggestionSmoothingModel(SmoothingModel smoothingModel) throws IOException {
writeNamedWriteable(smoothingModel);
}
/**
* Writes a {@link Task.Status} to the current stream.
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeTaskStatus(Task.Status status) throws IOException {
writeNamedWriteable(status);
}
@ -790,35 +773,38 @@ public abstract class StreamOutput extends OutputStream {
}
}
/**
* Writes a {@link RescoreBuilder} to the current stream
*/
public void writeRescorer(RescoreBuilder<?> rescorer) throws IOException {
writeNamedWriteable(rescorer);
}
/**
* Writes a {@link SuggestionBuilder} to the current stream
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeSuggestion(SuggestionBuilder<?> suggestion) throws IOException {
writeNamedWriteable(suggestion);
}
/**
* Writes a {@link SortBuilder} to the current stream
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeSortBuilder(SortBuilder<?> sort) throws IOException {
writeNamedWriteable(sort);
}
/** Writes a {@link DocValueFormat}. */
/**
* Writes a {@link DocValueFormat}.
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeValueFormat(DocValueFormat format) throws IOException {
writeNamedWriteable(format);
}
/**
* Writes an {@link AllocationCommand} to the stream.
* @deprecated prefer {@link #writeNamedWriteable(NamedWriteable)}
*/
@Deprecated
public void writeAllocationCommand(AllocationCommand command) throws IOException {
writeNamedWriteable(command);
}

View File

@ -65,6 +65,7 @@ import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.http.netty.NettyHttpServerTransport;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.IndexingMemoryController;
import org.elasticsearch.indices.IndicesQueryCache;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.indices.IndicesService;
@ -404,6 +405,11 @@ public final class ClusterSettings extends AbstractScopedSettings {
BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING,
BootstrapSettings.MLOCKALL_SETTING,
BootstrapSettings.SECCOMP_SETTING,
BootstrapSettings.CTRLHANDLER_SETTING
BootstrapSettings.CTRLHANDLER_SETTING,
IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING,
IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING,
IndexingMemoryController.MAX_INDEX_BUFFER_SIZE_SETTING,
IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING,
IndexingMemoryController.SHARD_MEMORY_INTERVAL_TIME_SETTING
)));
}

View File

@ -523,9 +523,9 @@ public class Setting<T> extends ToXContentToBytes {
return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), properties);
}
public static Setting<ByteSizeValue> byteSizeSetting(String key, ByteSizeValue value, ByteSizeValue minValue, ByteSizeValue maxValue,
public static Setting<ByteSizeValue> byteSizeSetting(String key, ByteSizeValue defaultValue, ByteSizeValue minValue, ByteSizeValue maxValue,
Property... properties) {
return byteSizeSetting(key, (s) -> value.toString(), minValue, maxValue, properties);
return byteSizeSetting(key, (s) -> defaultValue.toString(), minValue, maxValue, properties);
}
public static Setting<ByteSizeValue> byteSizeSetting(String key, Function<Settings, String> defaultValue,

View File

@ -20,6 +20,7 @@ package org.elasticsearch.common.xcontent;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.ParsingException;
import java.io.IOException;
@ -51,7 +52,7 @@ import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRIN
* use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of {@link #declareField} which can be used
* to implement exceptional parsing operations not covered by the high level methods.
*/
public final class ObjectParser<Value, Context> implements BiFunction<XContentParser, Context, Value> {
public final class ObjectParser<Value, Context extends ParseFieldMatcherSupplier> implements BiFunction<XContentParser, Context, Value> {
/**
* Adapts an array (or varargs) setter into a list setter.
*/
@ -87,32 +88,22 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
/**
* Parses a Value from the given {@link XContentParser}
* @param parser the parser to build a value from
* @param context must at least provide a {@link ParseFieldMatcher}
* @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)}
* @throws IOException if an IOException occurs.
*/
public Value parse(XContentParser parser) throws IOException {
public Value parse(XContentParser parser, Context context) throws IOException {
if (valueSupplier == null) {
throw new NullPointerException("valueSupplier is not set");
}
return parse(parser, valueSupplier.get(), null);
return parse(parser, valueSupplier.get(), context);
}
/**
* Parses a Value from the given {@link XContentParser}
* @param parser the parser to build a value from
* @param value the value to fill from the parser
* @return the parsed value
* @throws IOException if an IOException occurs.
*/
public Value parse(XContentParser parser, Value value) throws IOException {
return parse(parser, value, null);
}
/**
* Parses a Value from the given {@link XContentParser}
* @param parser the parser to build a value from
* @param value the value to fill from the parser
* @param context an optional context that is passed along to all declared field parsers
* @param context a context that is passed along to all declared field parsers
* @return the parsed value
* @throws IOException if an IOException occurs.
*/
@ -138,7 +129,7 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
throw new IllegalStateException("[" + name + "] no field found");
}
assert fieldParser != null;
fieldParser.assertSupports(name, token, currentFieldName, parser.getParseFieldMatcher());
fieldParser.assertSupports(name, token, currentFieldName, context.getParseFieldMatcher());
parseSub(parser, fieldParser, currentFieldName, value, context);
fieldParser = null;
}
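
The effect of constraining Context to ParseFieldMatcherSupplier is that the parsing context, not the XContentParser, now decides how strictly field names are matched, which is why parse() always takes a context instead of allowing null. A small sketch of that shape under assumed names (ContextMatcherSketch, MatcherSupplier and FieldMatcher are illustrative, not the real ObjectParser/ParseFieldMatcher classes):

import java.util.*;

public class ContextMatcherSketch {

    // Stand-in for ParseFieldMatcherSupplier: every parse context can hand out a matcher.
    interface MatcherSupplier {
        FieldMatcher matcher();
    }

    // Stand-in for ParseFieldMatcher: decides whether a candidate field name is accepted.
    interface FieldMatcher {
        boolean matches(String candidate, String canonical, Set<String> deprecatedNames);
    }

    static final FieldMatcher STRICT =
            (candidate, canonical, deprecated) -> candidate.equals(canonical);
    static final FieldMatcher LENIENT =
            (candidate, canonical, deprecated) -> candidate.equals(canonical) || deprecated.contains(candidate);

    // A parse context that carries the policy; parsers ask it instead of the parser object.
    record ParseContext(FieldMatcher matcher) implements MatcherSupplier {}

    static boolean accepts(MatcherSupplier context, String candidate) {
        // canonical name "size" with a deprecated alias "max_size"
        return context.matcher().matches(candidate, "size", Set.of("max_size"));
    }

    public static void main(String[] args) {
        System.out.println(accepts(new ParseContext(STRICT), "max_size"));  // false
        System.out.println(accepts(new ParseContext(LENIENT), "max_size")); // true
    }
}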

View File

@ -68,13 +68,13 @@ public class ParseFieldRegistry<T> {
* Lookup a value from the registry by name while checking that the name matches the ParseField.
*
* @param name The name of the thing to look up.
* @param parser The parser from which the name was looked up. This is used to resolve the {@link ParseFieldMatcher} and to build nice
* error messages.
* @param parser The parser from which the name was looked up.
* @param parseFieldMatcher to build nice error messages.
* @return The value being looked up. Never null.
* @throws ParsingException if the named thing isn't in the registry or the name was deprecated and deprecated names aren't supported.
*/
public T lookup(String name, XContentParser parser) {
T value = lookupReturningNullIfNotFound(name, parser);
public T lookup(String name, XContentParser parser, ParseFieldMatcher parseFieldMatcher) {
T value = lookupReturningNullIfNotFound(name, parseFieldMatcher);
if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "no [" + registryName + "] registered for [" + name + "]");
}
@ -85,19 +85,19 @@ public class ParseFieldRegistry<T> {
* Lookup a value from the registry by name while checking that the name matches the ParseField.
*
* @param name The name of the thing to look up.
* @param parser The parser from which the name was looked up. This is used to resolve the {@link ParseFieldMatcher} and to build nice
* @param parseFieldMatcher The {@link ParseFieldMatcher} used to match the name and to build nice
* error messages.
* @return The value being looked up or null if it wasn't found.
* @throws ParsingException if the named thing isn't in the registry or the name was deprecated and deprecated names aren't supported.
*/
public T lookupReturningNullIfNotFound(String name, XContentParser parser) {
public T lookupReturningNullIfNotFound(String name, ParseFieldMatcher parseFieldMatcher) {
Tuple<ParseField, T> parseFieldAndValue = registry.get(name);
if (parseFieldAndValue == null) {
return null;
}
ParseField parseField = parseFieldAndValue.v1();
T value = parseFieldAndValue.v2();
boolean match = parser.getParseFieldMatcher().match(name, parseField);
boolean match = parseFieldMatcher.match(name, parseField);
//this is always expected to match, ParseField is useful for deprecation warnings etc. here
assert match : "ParseField did not match registered name [" + name + "][" + registryName + "]";
return value;
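
The two lookup methods above differ only in how they treat an unknown name: lookup fails loudly, while lookupReturningNullIfNotFound lets the caller decide. A stripped-down sketch of that split with a plain map and hypothetical names (RegistrySketch is not the real ParseFieldRegistry, which also carries ParseField deprecation data):

import java.util.*;

public class RegistrySketch {
    private final Map<String, String> registry = new HashMap<>();
    private final String registryName;

    RegistrySketch(String registryName) { this.registryName = registryName; }

    void register(String name, String value) { registry.put(name, value); }

    // Returns null when the name is unknown: callers that can recover use this.
    String lookupReturningNullIfNotFound(String name) {
        return registry.get(name);
    }

    // Throws when the name is unknown: callers that need a value use this.
    String lookup(String name) {
        String value = lookupReturningNullIfNotFound(name);
        if (value == null) {
            throw new IllegalArgumentException("no [" + registryName + "] registered for [" + name + "]");
        }
        return value;
    }

    public static void main(String[] args) {
        RegistrySketch suggesters = new RegistrySketch("suggester");
        suggesters.register("phrase", "PhraseSuggester");
        System.out.println(suggesters.lookup("phrase"));                         // PhraseSuggester
        System.out.println(suggesters.lookupReturningNullIfNotFound("missing")); // null
    }
}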

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.xcontent;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lease.Releasable;
import java.io.IOException;
@ -187,7 +186,7 @@ public interface XContentParser extends Releasable {
float floatValue(boolean coerce) throws IOException;
double doubleValue(boolean coerce) throws IOException;
short shortValue() throws IOException;
int intValue() throws IOException;
@ -241,21 +240,10 @@ public interface XContentParser extends Releasable {
/**
* Used for error reporting to highlight where syntax errors occur in
* content being parsed.
*
*
* @return last token's location or null if cannot be determined
*/
XContentLocation getTokenLocation();
boolean isClosed();
/**
* Returns this parsers {@link ParseFieldMatcher}
*/
ParseFieldMatcher getParseFieldMatcher();
/**
* Sets this parsers {@link ParseFieldMatcher}
*/
void setParseFieldMatcher(ParseFieldMatcher matcher) ;
}

View File

@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent.support;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -37,8 +36,6 @@ import java.util.Map;
*/
public abstract class AbstractXContentParser implements XContentParser {
private ParseFieldMatcher matcher = ParseFieldMatcher.STRICT;
// Currently this is not a setting that can be changed and is a policy
// that relates to how parsing of things like "boost" is done across
// the whole of Elasticsearch (eg if String "1.0" is a valid float).
@ -337,12 +334,4 @@ public abstract class AbstractXContentParser implements XContentParser {
@Override
public abstract boolean isClosed();
public ParseFieldMatcher getParseFieldMatcher() {
return matcher;
}
public void setParseFieldMatcher(ParseFieldMatcher matcher) {
this.matcher = matcher;
}
}

View File

@ -295,15 +295,15 @@ public abstract class MetaDataStateFormat<T> {
try {
final Path stateFile = pathAndStateId.file;
final long id = pathAndStateId.id;
final XContentParser parser;
if (pathAndStateId.legacy) { // read the legacy format -- plain XContent
final byte[] data = Files.readAllBytes(stateFile);
if (data.length == 0) {
logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
continue;
}
parser = XContentHelper.createParser(new BytesArray(data));
state = fromXContent(parser);
try (final XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
state = fromXContent(parser);
}
if (state == null) {
logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
}
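
The change here is purely about resource safety: creating the parser inside try-with-resources guarantees it is closed even when fromXContent throws, instead of relying on the caller. A generic sketch of the same pattern — BufferedReader stands in for the XContentParser and the file contents are invented:

import java.io.*;

public class TryWithResourcesSketch {

    static String firstLine(File stateFile) throws IOException {
        // Closed automatically on both the normal and the exceptional path.
        try (BufferedReader parser = new BufferedReader(new FileReader(stateFile))) {
            return parser.readLine();
        }
    }

    public static void main(String[] args) throws IOException {
        File f = File.createTempFile("state", ".txt");
        try (PrintWriter w = new PrintWriter(f)) {
            w.println("{\"version\": 1}");
        }
        System.out.println(firstLine(f)); // {"version": 1}
        f.delete();
    }
}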

View File

@ -36,13 +36,13 @@ public final class HttpTransportSettings {
public static final Setting<Boolean> SETTING_CORS_ENABLED =
Setting.boolSetting("http.cors.enabled", false, Property.NodeScope);
public static final Setting<String> SETTING_CORS_ALLOW_ORIGIN =
new Setting<String>("http.cors.allow-origin", "", (value) -> value, Property.NodeScope);
new Setting<>("http.cors.allow-origin", "", (value) -> value, Property.NodeScope);
public static final Setting<Integer> SETTING_CORS_MAX_AGE =
Setting.intSetting("http.cors.max-age", 1728000, Property.NodeScope);
public static final Setting<String> SETTING_CORS_ALLOW_METHODS =
new Setting<String>("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, Property.NodeScope);
new Setting<>("http.cors.allow-methods", "OPTIONS, HEAD, GET, POST, PUT, DELETE", (value) -> value, Property.NodeScope);
public static final Setting<String> SETTING_CORS_ALLOW_HEADERS =
new Setting<String>("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, Property.NodeScope);
new Setting<>("http.cors.allow-headers", "X-Requested-With, Content-Type, Content-Length", (value) -> value, Property.NodeScope);
public static final Setting<Boolean> SETTING_CORS_ALLOW_CREDENTIALS =
Setting.boolSetting("http.cors.allow-credentials", false, Property.NodeScope);
public static final Setting<Boolean> SETTING_PIPELINING =
@ -61,7 +61,7 @@ public final class HttpTransportSettings {
listSetting("http.bind_host", SETTING_HTTP_HOST, Function.identity(), Property.NodeScope);
public static final Setting<PortsRange> SETTING_HTTP_PORT =
new Setting<PortsRange>("http.port", "9200-9300", PortsRange::new, Property.NodeScope);
new Setting<>("http.port", "9200-9300", PortsRange::new, Property.NodeScope);
public static final Setting<Integer> SETTING_HTTP_PUBLISH_PORT =
Setting.intSetting("http.publish_port", -1, -1, Property.NodeScope);
public static final Setting<Boolean> SETTING_HTTP_DETAILED_ERRORS_ENABLED =

View File

@ -118,7 +118,8 @@ public final class NettyHttpChannel extends AbstractRestChannel {
ChannelFuture future;
if (orderedUpstreamMessageEvent != null) {
OrderedDownstreamChannelEvent downstreamChannelEvent = new OrderedDownstreamChannelEvent(orderedUpstreamMessageEvent, 0, true, resp);
OrderedDownstreamChannelEvent downstreamChannelEvent =
new OrderedDownstreamChannelEvent(orderedUpstreamMessageEvent, 0, true, resp);
future = downstreamChannelEvent.getFuture();
channel.getPipeline().sendDownstream(downstreamChannelEvent);
} else {

View File

@ -249,7 +249,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) {
receiveBufferSizePredictorFactory = new FixedReceiveBufferSizePredictorFactory((int) receivePredictorMax.bytes());
} else {
receiveBufferSizePredictorFactory = new AdaptiveReceiveBufferSizePredictorFactory((int) receivePredictorMin.bytes(), (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes());
receiveBufferSizePredictorFactory = new AdaptiveReceiveBufferSizePredictorFactory(
(int) receivePredictorMin.bytes(), (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes());
}
this.compression = SETTING_HTTP_COMPRESSION.get(settings);
@ -265,8 +266,9 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
}
this.maxContentLength = maxContentLength;
logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], receive_predictor[{}->{}], pipelining[{}], pipelining_max_events[{}]",
maxChunkSize, maxHeaderSize, maxInitialLineLength, this.maxContentLength, receivePredictorMin, receivePredictorMax, pipelining, pipeliningMaxEvents);
logger.debug("using max_chunk_size[{}], max_header_size[{}], max_initial_line_length[{}], max_content_length[{}], " +
"receive_predictor[{}->{}], pipelining[{}], pipelining_max_events[{}]", maxChunkSize, maxHeaderSize, maxInitialLineLength,
this.maxContentLength, receivePredictorMin, receivePredictorMax, pipelining, pipeliningMaxEvents);
}
public Settings settings() {
@ -335,7 +337,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
final int publishPort = resolvePublishPort(settings, boundAddresses, publishInetAddress);
final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort);
return new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[boundAddresses.size()]), new InetSocketTransportAddress(publishAddress));
return new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[0]), new InetSocketTransportAddress(publishAddress));
}
// package private for tests

View File

@ -239,7 +239,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
}
} finally {
IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService, refreshTask, fsyncTask,
IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask,
cache().getPercolatorQueryCache());
}
}

View File

@ -33,7 +33,7 @@ import java.util.concurrent.TimeUnit;
/**
*/
public final class SearchSlowLog implements SearchOperationListener {
private final Index index;
private boolean reformat;
private long queryWarnThreshold;
@ -87,6 +87,8 @@ public final class SearchSlowLog implements SearchOperationListener {
this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query");
this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch");
this.index = indexSettings.getIndex();
indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_REFORMAT, this::setReformat);
this.reformat = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_REFORMAT);
@ -120,36 +122,38 @@ public final class SearchSlowLog implements SearchOperationListener {
@Override
public void onQueryPhase(SearchContext context, long tookInNanos) {
if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) {
queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
queryLogger.warn("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) {
queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
queryLogger.info("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) {
queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
queryLogger.debug("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) {
queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
queryLogger.trace("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
}
}
@Override
public void onFetchPhase(SearchContext context, long tookInNanos) {
if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) {
fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
fetchLogger.warn("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) {
fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
fetchLogger.info("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) {
fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
fetchLogger.debug("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
} else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) {
fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat));
fetchLogger.trace("{}", new SlowLogSearchContextPrinter(index, context, tookInNanos, reformat));
}
}
private static class SlowLogSearchContextPrinter {
static final class SlowLogSearchContextPrinter {
private final SearchContext context;
private final Index index;
private final long tookInNanos;
private final boolean reformat;
public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) {
public SlowLogSearchContextPrinter(Index index, SearchContext context, long tookInNanos, boolean reformat) {
this.context = context;
this.index = index;
this.tookInNanos = tookInNanos;
this.reformat = reformat;
}
@ -157,6 +161,7 @@ public final class SearchSlowLog implements SearchOperationListener {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(index).append(" ");
sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
if (context.getQueryShardContext().getTypes() == null) {
sb.append("types[], ");

View File

@ -426,6 +426,7 @@ public abstract class Engine implements Closeable {
stats.addStoredFieldsMemoryInBytes(guardedRamBytesUsed(segmentReader.getFieldsReader()));
stats.addTermVectorsMemoryInBytes(guardedRamBytesUsed(segmentReader.getTermVectorsReader()));
stats.addNormsMemoryInBytes(guardedRamBytesUsed(segmentReader.getNormsReader()));
stats.addPointsMemoryInBytes(guardedRamBytesUsed(segmentReader.getPointsReader()));
stats.addDocValuesMemoryInBytes(guardedRamBytesUsed(segmentReader.getDocValuesReader()));
if (includeSegmentFileSizes) {

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.engine;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -42,6 +41,7 @@ public class SegmentsStats implements Streamable, ToXContent {
private long storedFieldsMemoryInBytes;
private long termVectorsMemoryInBytes;
private long normsMemoryInBytes;
private long pointsMemoryInBytes;
private long docValuesMemoryInBytes;
private long indexWriterMemoryInBytes;
private long indexWriterMaxMemoryInBytes;
@ -67,6 +67,8 @@ public class SegmentsStats implements Streamable, ToXContent {
.fPut("pay", "Payloads")
.fPut("nvd", "Norms")
.fPut("nvm", "Norms")
.fPut("dii", "Points")
.fPut("dim", "Points")
.fPut("dvd", "DocValues")
.fPut("dvm", "DocValues")
.fPut("tvx", "Term Vector Index")
@ -98,6 +100,10 @@ public class SegmentsStats implements Streamable, ToXContent {
this.normsMemoryInBytes += normsMemoryInBytes;
}
public void addPointsMemoryInBytes(long pointsMemoryInBytes) {
this.pointsMemoryInBytes += pointsMemoryInBytes;
}
public void addDocValuesMemoryInBytes(long docValuesMemoryInBytes) {
this.docValuesMemoryInBytes += docValuesMemoryInBytes;
}
@ -143,6 +149,7 @@ public class SegmentsStats implements Streamable, ToXContent {
addStoredFieldsMemoryInBytes(mergeStats.storedFieldsMemoryInBytes);
addTermVectorsMemoryInBytes(mergeStats.termVectorsMemoryInBytes);
addNormsMemoryInBytes(mergeStats.normsMemoryInBytes);
addPointsMemoryInBytes(mergeStats.pointsMemoryInBytes);
addDocValuesMemoryInBytes(mergeStats.docValuesMemoryInBytes);
addIndexWriterMemoryInBytes(mergeStats.indexWriterMemoryInBytes);
addIndexWriterMaxMemoryInBytes(mergeStats.indexWriterMaxMemoryInBytes);
@ -213,6 +220,17 @@ public class SegmentsStats implements Streamable, ToXContent {
return new ByteSizeValue(normsMemoryInBytes);
}
/**
* Estimation of the points memory usage by a segment.
*/
public long getPointsMemoryInBytes() {
return this.pointsMemoryInBytes;
}
public ByteSizeValue getPointsMemory() {
return new ByteSizeValue(pointsMemoryInBytes);
}
/**
* Estimation of the doc values memory usage by a segment.
*/
@ -287,6 +305,7 @@ public class SegmentsStats implements Streamable, ToXContent {
builder.byteSizeField(Fields.STORED_FIELDS_MEMORY_IN_BYTES, Fields.STORED_FIELDS_MEMORY, storedFieldsMemoryInBytes);
builder.byteSizeField(Fields.TERM_VECTORS_MEMORY_IN_BYTES, Fields.TERM_VECTORS_MEMORY, termVectorsMemoryInBytes);
builder.byteSizeField(Fields.NORMS_MEMORY_IN_BYTES, Fields.NORMS_MEMORY, normsMemoryInBytes);
builder.byteSizeField(Fields.POINTS_MEMORY_IN_BYTES, Fields.POINTS_MEMORY, pointsMemoryInBytes);
builder.byteSizeField(Fields.DOC_VALUES_MEMORY_IN_BYTES, Fields.DOC_VALUES_MEMORY, docValuesMemoryInBytes);
builder.byteSizeField(Fields.INDEX_WRITER_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MEMORY, indexWriterMemoryInBytes);
builder.byteSizeField(Fields.INDEX_WRITER_MAX_MEMORY_IN_BYTES, Fields.INDEX_WRITER_MAX_MEMORY, indexWriterMaxMemoryInBytes);
@ -318,6 +337,8 @@ public class SegmentsStats implements Streamable, ToXContent {
static final XContentBuilderString TERM_VECTORS_MEMORY_IN_BYTES = new XContentBuilderString("term_vectors_memory_in_bytes");
static final XContentBuilderString NORMS_MEMORY = new XContentBuilderString("norms_memory");
static final XContentBuilderString NORMS_MEMORY_IN_BYTES = new XContentBuilderString("norms_memory_in_bytes");
static final XContentBuilderString POINTS_MEMORY = new XContentBuilderString("points_memory");
static final XContentBuilderString POINTS_MEMORY_IN_BYTES = new XContentBuilderString("points_memory_in_bytes");
static final XContentBuilderString DOC_VALUES_MEMORY = new XContentBuilderString("doc_values_memory");
static final XContentBuilderString DOC_VALUES_MEMORY_IN_BYTES = new XContentBuilderString("doc_values_memory_in_bytes");
static final XContentBuilderString INDEX_WRITER_MEMORY = new XContentBuilderString("index_writer_memory");
@ -342,24 +363,21 @@ public class SegmentsStats implements Streamable, ToXContent {
storedFieldsMemoryInBytes = in.readLong();
termVectorsMemoryInBytes = in.readLong();
normsMemoryInBytes = in.readLong();
pointsMemoryInBytes = in.readLong();
docValuesMemoryInBytes = in.readLong();
indexWriterMemoryInBytes = in.readLong();
versionMapMemoryInBytes = in.readLong();
indexWriterMaxMemoryInBytes = in.readLong();
bitsetMemoryInBytes = in.readLong();
if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
int size = in.readVInt();
ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
Long value = in.readLong();
map.put(key, value);
}
fileSizes = map.build();
} else {
fileSizes = ImmutableOpenMap.of();
int size = in.readVInt();
ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
Long value = in.readLong();
map.put(key, value);
}
fileSizes = map.build();
}
@Override
@ -370,19 +388,18 @@ public class SegmentsStats implements Streamable, ToXContent {
out.writeLong(storedFieldsMemoryInBytes);
out.writeLong(termVectorsMemoryInBytes);
out.writeLong(normsMemoryInBytes);
out.writeLong(pointsMemoryInBytes);
out.writeLong(docValuesMemoryInBytes);
out.writeLong(indexWriterMemoryInBytes);
out.writeLong(versionMapMemoryInBytes);
out.writeLong(indexWriterMaxMemoryInBytes);
out.writeLong(bitsetMemoryInBytes);
if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
out.writeVInt(fileSizes.size());
for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
ObjectObjectCursor<String, Long> entry = it.next();
out.writeString(entry.key);
out.writeLong(entry.value);
}
out.writeVInt(fileSizes.size());
for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
ObjectObjectCursor<String, Long> entry = it.next();
out.writeString(entry.key);
out.writeLong(entry.value);
}
}
}
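
Adding pointsMemoryInBytes also means adding it to readFrom and writeTo at the same position, because Streamable fields travel as a fixed, ordered sequence with no field names on the wire. A tiny sketch of that ordering rule using plain java.io streams — StatsWireSketch and its field names are illustrative only:

import java.io.*;

public class StatsWireSketch {
    long storedFields;
    long norms;
    long points;     // newly added field
    long docValues;

    void writeTo(DataOutput out) throws IOException {
        out.writeLong(storedFields);
        out.writeLong(norms);
        out.writeLong(points);      // written right after norms...
        out.writeLong(docValues);
    }

    void readFrom(DataInput in) throws IOException {
        storedFields = in.readLong();
        norms = in.readLong();
        points = in.readLong();     // ...so it must be read right after norms too
        docValues = in.readLong();
    }

    public static void main(String[] args) throws IOException {
        StatsWireSketch stats = new StatsWireSketch();
        stats.points = 42;

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        stats.writeTo(new DataOutputStream(bytes));

        StatsWireSketch copy = new StatsWireSketch();
        copy.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.points); // 42
    }
}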

View File

@ -353,10 +353,6 @@ public class DocumentMapper implements ToXContent {
return new DocumentMapper(mapperService, updated);
}
public void close() {
documentParser.close();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return mapping.toXContent(builder, params);

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@ -28,7 +27,6 @@ import java.util.List;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@ -38,12 +36,12 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyDoubleFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyFloatFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyIntegerFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
@ -56,14 +54,7 @@ import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.mapper.object.ObjectMapper;
/** A parser for documents, given mappings from a DocumentMapper */
final class DocumentParser implements Closeable {
private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() {
@Override
protected ParseContext.InternalParseContext initialValue() {
return new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0));
}
};
final class DocumentParser {
private final IndexSettings indexSettings;
private final DocumentMapperParser docMapperParser;
@ -80,7 +71,7 @@ final class DocumentParser implements Closeable {
source.type(docMapper.type());
final Mapping mapping = docMapper.mapping();
final ParseContext.InternalParseContext context = cache.get();
final ParseContext.InternalParseContext context = new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0));
XContentParser parser = null;
try {
parser = parser(source);
@ -96,12 +87,14 @@ final class DocumentParser implements Closeable {
parser.close();
}
}
String remainingPath = context.path().pathAsText("");
if (remainingPath.isEmpty() == false) {
throw new IllegalStateException("found leftover path elements: " + remainingPath);
}
reverseOrder(context);
ParsedDocument doc = parsedDocument(source, context, createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()));
// reset the context to free up memory
context.reset(null, null, null);
return doc;
}
@ -542,6 +535,7 @@ final class DocumentParser implements Closeable {
context.addDynamicMapper(mapper);
context.path().add(arrayFieldName);
parseObjectOrField(context, mapper);
context.path().remove();
} else {
parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
}
@ -925,9 +919,4 @@ final class DocumentParser implements Closeable {
}
return objectMapper.getMapper(subfields[subfields.length - 1]);
}
@Override
public void close() {
cache.close();
}
}
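
The removal of the CloseableThreadLocal cache means DocumentParser no longer needs to implement Closeable or to reset the context after each document: a fresh, short-lived context is allocated per parse and simply dropped. A rough sketch of that trade-off under made-up names (PerCallContextSketch and ParseState are illustrative, not the real ParseContext):

import java.util.ArrayList;
import java.util.List;

public class PerCallContextSketch {

    // A mutable scratch object used while parsing one document.
    static class ParseState {
        final List<String> fields = new ArrayList<>();
    }

    // Before: a ThreadLocal<ParseState> that had to be reset after every call and
    // closed when the owner shut down. After: allocate one per call and let GC handle
    // it -- no reset, no Closeable, no leaked per-thread state.
    static List<String> parse(String source) {
        ParseState state = new ParseState();
        for (String field : source.split(",")) {
            state.fields.add(field.trim());
        }
        return state.fields;
    }

    public static void main(String[] args) {
        System.out.println(parse("title, body, tags")); // [title, body, tags]
    }
}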

View File

@ -63,7 +63,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
/**
*
*/
public class MapperService extends AbstractIndexComponent implements Closeable {
public class MapperService extends AbstractIndexComponent {
/**
* The reason why a mapping is being merged.
@ -166,13 +166,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
@Override
public void close() {
for (DocumentMapper documentMapper : mappers.values()) {
documentMapper.close();
}
}
public boolean hasNested() {
return this.hasNested;
}

View File

@ -19,8 +19,6 @@
package org.elasticsearch.index.percolator;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
@ -28,6 +26,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -36,8 +35,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
@ -154,7 +151,8 @@ public class PercolatorFieldMapper extends FieldMapper {
@Override
public Mapper parse(ParseContext context) throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(this.queryShardContext);
QueryBuilder<?> queryBuilder = parseQueryBuilder(queryShardContext.parseContext(), context.parser());
XContentParser parser = context.parser();
QueryBuilder<?> queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation());
// Fetching of terms, shapes and indexed scripts happen during this rewrite:
queryBuilder = queryBuilder.rewrite(queryShardContext);
@ -171,7 +169,7 @@ public class PercolatorFieldMapper extends FieldMapper {
}
public static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException {
return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.parseContext(), parser));
return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.newParseContext(parser), parser.getTokenLocation()));
}
static Query toQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, QueryBuilder<?> queryBuilder) throws IOException {
@ -190,7 +188,6 @@ public class PercolatorFieldMapper extends FieldMapper {
// as an analyzed string.
context.setAllowUnmappedFields(false);
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
context.parseFieldMatcher(context.getIndexSettings().getParseFieldMatcher());
try {
return queryBuilder.toQuery(context);
} finally {
@ -198,14 +195,11 @@ public class PercolatorFieldMapper extends FieldMapper {
}
}
static QueryBuilder<?> parseQueryBuilder(QueryParseContext context, XContentParser parser) {
context.reset(parser);
static QueryBuilder<?> parseQueryBuilder(QueryParseContext context, XContentLocation location) {
try {
return context.parseInnerQueryBuilder();
} catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
} finally {
context.reset(null);
throw new ParsingException(location, "Failed to parse", e);
}
}

View File

@ -368,17 +368,17 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
disableCoord = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_NUMBER_SHOULD_MATCH)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_NUMBER_SHOULD_MATCH)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ADJUST_PURE_NEGATIVE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ADJUST_PURE_NEGATIVE)) {
adjustPureNegative = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[bool] query does not support [" + currentFieldName + "]");
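
These builders all share the same parsing loop: walk the object's fields, test each name against the declared ParseFields via the context's matcher, and reject anything unrecognised with a ParsingException; the mechanical change in this commit is only that the matcher is now obtained through getParseFieldMatcher(). A compact sketch of that dispatch-and-reject shape on a plain map, with invented names (QuerySketch is not BoolQueryBuilder, and the switch stands in for the ParseField matching):

import java.util.Map;

public class QuerySketch {
    float boost = 1.0f;
    String queryName;

    static QuerySketch fromMap(Map<String, Object> source) {
        QuerySketch query = new QuerySketch();
        for (Map.Entry<String, Object> field : source.entrySet()) {
            switch (field.getKey()) {          // stands in for getParseFieldMatcher().match(...)
                case "boost" -> query.boost = ((Number) field.getValue()).floatValue();
                case "_name" -> query.queryName = (String) field.getValue();
                default -> throw new IllegalArgumentException(
                        "[bool] query does not support [" + field.getKey() + "]");
            }
        }
        return query;
    }

    public static void main(String[] args) {
        QuerySketch q = QuerySketch.fromMap(Map.of("boost", 2.0, "_name", "my_query"));
        System.out.println(q.boost + " " + q.queryName); // 2.0 my_query
    }
}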

View File

@ -154,21 +154,21 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder<BoostingQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, POSITIVE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, POSITIVE_FIELD)) {
positiveQuery = parseContext.parseInnerQueryBuilder();
positiveQueryFound = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NEGATIVE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, NEGATIVE_FIELD)) {
negativeQuery = parseContext.parseInnerQueryBuilder();
negativeQueryFound = true;
} else {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, NEGATIVE_BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, NEGATIVE_BOOST_FIELD)) {
negativeBoost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[boosting] query does not support [" + currentFieldName + "]");

View File

@ -287,15 +287,15 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
String innerFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
innerFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(innerFieldName, LOW_FREQ_FIELD)) {
if (parseContext.getParseFieldMatcher().match(innerFieldName, LOW_FREQ_FIELD)) {
lowFreqMinimumShouldMatch = parser.text();
} else if (parseContext.parseFieldMatcher().match(innerFieldName, HIGH_FREQ_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(innerFieldName, HIGH_FREQ_FIELD)) {
highFreqMinimumShouldMatch = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +
@ -313,23 +313,23 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
"] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
text = parser.objectText();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
disableCoord = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, HIGH_FREQ_OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, HIGH_FREQ_OPERATOR_FIELD)) {
highFreqOperator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOW_FREQ_OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOW_FREQ_OPERATOR_FIELD)) {
lowFreqOperator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
lowFreqMinimumShouldMatch = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
cutoffFrequency = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME +

View File

@ -102,7 +102,7 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
if (queryFound) {
throw new ParsingException(parser.getTokenLocation(), "[" + ConstantScoreQueryBuilder.NAME + "]"
+ " accepts only one 'filter' element.");
@ -114,9 +114,9 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
"[constant_score] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -138,7 +138,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) {
queriesFound = true;
QueryBuilder<?> query = parseContext.parseInnerQueryBuilder();
queries.add(query);
@ -146,7 +146,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder>
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERIES_FIELD)) {
queriesFound = true;
while (token != XContentParser.Token.END_ARRAY) {
QueryBuilder<?> query = parseContext.parseInnerQueryBuilder();
@ -157,11 +157,11 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder>
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
tieBreaker = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[dis_max] query does not support [" + currentFieldName + "]");

View File

@ -98,11 +98,11 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldPattern = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +

View File

@ -118,7 +118,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMask
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
throw new ParsingException(parser.getTokenLocation(), "[field_masking_span] query must be of type span query");
@ -129,11 +129,11 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMask
+ currentFieldName + "]");
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
field = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -284,23 +284,23 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> i
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
prefixLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansions = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TRANSPOSITIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TRANSPOSITIONS_FIELD)) {
transpositions = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[fuzzy] query does not support [" + currentFieldName + "]");
@ -348,7 +348,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder<FuzzyQueryBuilder> i
query = new FuzzyQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), maxEdits, prefixLength, maxExpansions, transpositions);
}
if (query instanceof MultiTermQuery) {
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
QueryParsers.setRewriteMethod((MultiTermQuery) query, rewriteMethod);
}
return query;
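
The recurring change across these query builders is a rename of the QueryParseContext accessor: parseFieldMatcher() becomes getParseFieldMatcher(), while the ParseFieldMatcher#match calls themselves are unchanged. A minimal sketch of the parsing-loop shape the rename touches, not taken from this commit (the "example" query name is illustrative; only the getParseFieldMatcher() call and the ParseField constants mirror the hunks above):

    String currentFieldName = null;
    String queryName = null;
    float boost = 1.0f;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token.isValue()) {
            // getParseFieldMatcher() replaces the old parseFieldMatcher() accessor
            if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                boost = parser.floatValue();
            } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                queryName = parser.text();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "[example] query does not support [" + currentFieldName + "]");
            }
        }
    }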

View File

@ -418,30 +418,30 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
token = parser.nextToken();
if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_FIELD)) {
top = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_FIELD)) {
bottom = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LEFT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LEFT_FIELD)) {
left = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RIGHT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, RIGHT_FIELD)) {
right = parser.doubleValue();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_LEFT_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
left = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_RIGHT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_RIGHT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_LEFT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
left = sparse.getLon();
@ -456,22 +456,22 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]",

View File

@ -377,15 +377,15 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_FIELD)) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
@ -393,22 +393,22 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {

View File

@ -440,27 +440,27 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TO_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
includeLower = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
includeUpper = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, GT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
@ -468,7 +468,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
vFrom = parser.numberValue(); // a Number
}
includeLower = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
@ -476,7 +476,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
vFrom = parser.numberValue(); // a Number
}
includeLower = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
@ -484,7 +484,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
vTo = parser.numberValue(); // a Number
}
includeUpper = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
@ -492,9 +492,9 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
vTo = parser.numberValue(); // a Number
}
includeUpper = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
String maybeFieldName = currentFieldName.substring(0,
@ -522,17 +522,17 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
point = new GeoPoint();
}
point.resetLon(parser.doubleValue());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {

View File

@ -269,7 +269,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, POINTS_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, POINTS_FIELD)) {
shell = new ArrayList<GeoPoint>();
while ((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoUtils.parseGeoPoint(parser));
@ -288,16 +288,16 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -144,7 +144,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
super(in);
fieldName = in.readString();
if (in.readBoolean()) {
shape = in.readShape();
shape = in.readNamedWriteable(ShapeBuilder.class);
indexedShapeId = null;
indexedShapeType = null;
} else {
@ -165,7 +165,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
boolean hasShape = shape != null;
out.writeBoolean(hasShape);
if (hasShape) {
out.writeShape(shape);
out.writeNamedWriteable(shape);
} else {
out.writeOptionalString(indexedShapeId);
out.writeOptionalString(indexedShapeType);
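
GeoShapeQueryBuilder now serializes its shape through the generic NamedWriteable mechanism instead of the dedicated readShape/writeShape stream helpers. A minimal sketch of the symmetric read/write pair, following the field layout visible in the two hunks above (the else branch of the read side is assumed from the write side, it is not shown in the hunk):

    // write side: was out.writeShape(shape)
    boolean hasShape = shape != null;
    out.writeBoolean(hasShape);
    if (hasShape) {
        out.writeNamedWriteable(shape);
    } else {
        out.writeOptionalString(indexedShapeId);
        out.writeOptionalString(indexedShapeType);
    }

    // read side: was shape = in.readShape()
    if (in.readBoolean()) {
        shape = in.readNamedWriteable(ShapeBuilder.class);
    } else {
        indexedShapeId = in.readOptionalString();
        indexedShapeType = in.readOptionalString();
    }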
@ -382,9 +382,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
String[] pathElements = Strings.splitStringToArray(path, '.');
int currentPathSlot = 0;
XContentParser parser = null;
try {
parser = XContentHelper.createParser(response.getSourceAsBytesRef());
try (XContentParser parser = XContentHelper.createParser(response.getSourceAsBytesRef())) {
XContentParser.Token currentToken;
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
@ -400,10 +398,6 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
}
}
throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
} finally {
if (parser != null) {
parser.close();
}
}
}
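
The indexed-shape fetch above drops the manual try/finally around XContentHelper.createParser in favor of try-with-resources, so the parser is closed on every exit path. A minimal sketch of the same refactor in isolation (the sourceBytes variable and the loop body are placeholders):

    try (XContentParser parser = XContentHelper.createParser(sourceBytes)) {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            // walk the _source until the requested path element is reached
        }
        // no explicit finally { parser.close(); } is needed any more
    }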
@ -492,31 +486,31 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) {
shape = ShapeBuilder.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) {
String strategyName = parser.text();
strategy = SpatialStrategy.fromString(strategyName);
if (strategy == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
shapeRelation = ShapeRelation.getRelationByName(parser.text());
if (shapeRelation == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) {
index = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
shapePath = parser.text();
}
} else {
@ -531,11 +525,11 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +

View File

@ -302,7 +302,7 @@ public class GeohashCellQuery {
if (parseContext.isDeprecatedSetting(field)) {
// skip
} else if (parseContext.parseFieldMatcher().match(field, PRECISION_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(field, PRECISION_FIELD)) {
token = parser.nextToken();
if (token == Token.VALUE_NUMBER) {
levels = parser.intValue();
@ -310,16 +310,16 @@ public class GeohashCellQuery {
double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS);
levels = GeoUtils.geoHashLevelsForPrecision(meters);
}
} else if (parseContext.parseFieldMatcher().match(field, NEIGHBORS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(field, NEIGHBORS_FIELD)) {
parser.nextToken();
neighbors = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) {
parser.nextToken();
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(field, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(field, IGNORE_UNMAPPED_FIELD)) {
parser.nextToken();
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) {
parser.nextToken();
boost = parser.floatValue();
} else {

View File

@ -247,27 +247,27 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHitBuilder = InnerHitBuilder.fromXContent(parseContext);
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
childType = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
scoreMode = parseScoreMode(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_CHILDREN_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MIN_CHILDREN_FIELD)) {
minChildren = parser.intValue(true);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_CHILDREN_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_CHILDREN_FIELD)) {
maxChildren = parser.intValue(true);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");

View File

@ -249,17 +249,17 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
iqb = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHits = InnerHitBuilder.fromXContent(parseContext);
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
parentType = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
String scoreModeValue = parser.text();
if ("score".equals(scoreModeValue)) {
score = true;
@ -269,13 +269,13 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" +
scoreModeValue + "] as an option for score_mode");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SCORE_FIELD)) {
score = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[has_parent] query does not support [" + currentFieldName + "]");

View File

@ -141,7 +141,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, VALUES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUES_FIELD)) {
idsProvided = true;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if ((token == XContentParser.Token.VALUE_STRING) ||
@ -156,7 +156,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
"Illegal value for id, expecting a string or number, got: " + token);
}
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
@ -169,11 +169,11 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
"] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
types = Collections.singletonList(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME +

View File

@ -157,15 +157,15 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilde
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
innerQuery = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
noMatchQuery = parseContext.parseInnerQueryBuilder();
} else {
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INDICES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, INDICES_FIELD)) {
if (indices.isEmpty() == false) {
throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified");
}
@ -180,16 +180,16 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilde
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INDEX_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEX_FIELD)) {
if (indices.isEmpty() == false) {
throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified");
}
indices.add(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
noMatchQuery = parseNoMatchQuery(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]");

View File

@ -71,9 +71,9 @@ public class MatchAllQueryBuilder extends AbstractQueryBuilder<MatchAllQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME +

View File

@ -71,9 +71,9 @@ public class MatchNoneQueryBuilder extends AbstractQueryBuilder<MatchNoneQueryBu
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "["+MatchNoneQueryBuilder.NAME +

View File

@ -214,17 +214,17 @@ public class MatchPhrasePrefixQueryBuilder extends AbstractQueryBuilder<MatchPhr
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MatchPhraseQueryBuilder.SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchPhraseQueryBuilder.SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansion = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -183,15 +183,15 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder<MatchPhraseQue
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MatchQueryBuilder.ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -455,7 +455,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
matchQuery.setFuzzyPrefixLength(prefixLength);
matchQuery.setMaxExpansions(maxExpansions);
matchQuery.setTranspositions(fuzzyTranspositions);
matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null));
matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), fuzzyRewrite, null));
matchQuery.setLenient(lenient);
matchQuery.setCommonTermsCutoff(cutoffFrequency);
matchQuery.setZeroTermsQuery(zeroTermsQuery);
@ -540,9 +540,9 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
String tStr = parser.text();
if ("boolean".equals(tStr)) {
type = MatchQuery.Type.BOOLEAN;
@ -553,31 +553,31 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + NAME + "] query does not support type " + tStr);
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
prefixLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansion = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
operator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
fuzzyRewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_TRANSPOSITIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_TRANSPOSITIONS_FIELD)) {
fuzzyTranspositions = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
cutOffFrequency = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
String zeroTermsDocs = parser.text();
if ("none".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
@ -587,7 +587,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -425,10 +425,11 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (contentType == builder.contentType()) {
builder.rawField(Field.DOC.getPreferredName(), this.doc);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(this.doc);
parser.nextToken();
builder.field(Field.DOC.getPreferredName());
builder.copyCurrentStructure(parser);
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(this.doc)) {
parser.nextToken();
builder.field(Field.DOC.getPreferredName());
builder.copyCurrentStructure(parser);
}
}
}
if (this.fields != null) {
@ -841,33 +842,33 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.LIKE)) {
parseLikeField(parseContext, likeTexts, likeItems);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
parseLikeField(parseContext, unlikeTexts, unlikeItems);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE_TEXT)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.LIKE_TEXT)) {
likeTexts.add(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_QUERY_TERMS)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MAX_QUERY_TERMS)) {
maxQueryTerms = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_TERM_FREQ)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MIN_TERM_FREQ)) {
minTermFreq = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_DOC_FREQ)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MIN_DOC_FREQ)) {
minDocFreq = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_DOC_FREQ)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MAX_DOC_FREQ)) {
maxDocFreq = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MIN_WORD_LENGTH)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MIN_WORD_LENGTH)) {
minWordLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MAX_WORD_LENGTH)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MAX_WORD_LENGTH)) {
maxWordLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.ANALYZER)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.ANALYZER)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.MINIMUM_SHOULD_MATCH)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.MINIMUM_SHOULD_MATCH)) {
minimumShouldMatch = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.BOOST_TERMS)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.BOOST_TERMS)) {
boostTerms = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.INCLUDE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.INCLUDE)) {
include = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.FAIL_ON_UNSUPPORTED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.FAIL_ON_UNSUPPORTED_FIELD)) {
failOnUnsupportedField = parser.booleanValue();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
@ -877,34 +878,34 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Field.FIELDS)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.FIELDS)) {
fields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
fields.add(parser.text());
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.LIKE)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parseContext, likeTexts, likeItems);
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseLikeField(parseContext, unlikeTexts, unlikeItems);
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.IDS)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.IDS)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (!token.isValue()) {
throw new IllegalArgumentException("ids array element should only contain ids");
}
likeItems.add(new Item(null, null, parser.text()));
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.DOCS)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.DOCS)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("docs array element should include an object");
}
likeItems.add(Item.parse(parser, parseContext.parseFieldMatcher(), new Item()));
likeItems.add(Item.parse(parser, parseContext.getParseFieldMatcher(), new Item()));
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.STOP_WORDS)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.STOP_WORDS)) {
stopWords = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
stopWords.add(parser.text());
@ -913,9 +914,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.LIKE)) {
parseLikeField(parseContext, likeTexts, likeItems);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
parseLikeField(parseContext, unlikeTexts, unlikeItems);
} else {
throw new ParsingException(parser.getTokenLocation(), "[mlt] query does not support [" + currentFieldName + "]");
@ -963,7 +964,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (parser.currentToken().isValue()) {
texts.add(parser.text());
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
items.add(Item.parse(parser, parseContext.parseFieldMatcher(), new Item()));
items.add(Item.parse(parser, parseContext.getParseFieldMatcher(), new Item()));
} else {
throw new IllegalArgumentException("Content of 'like' parameter should either be a string or an object");
}

View File

@ -584,7 +584,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
parseFieldAndBoost(parser, fieldsBoosts);
@ -596,37 +596,37 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
"[" + NAME + "] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
value = parser.objectText();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.parseFieldMatcher());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = MultiMatchQueryBuilder.Type.parse(parser.text(), parseContext.getParseFieldMatcher());
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_LENGTH_FIELD)) {
prefixLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_EXPANSIONS_FIELD)) {
maxExpansions = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPERATOR_FIELD)) {
operator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
fuzzyRewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, USE_DIS_MAX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, USE_DIS_MAX_FIELD)) {
useDisMax = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
tieBreaker = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, CUTOFF_FREQUENCY_FIELD)) {
cutoffFrequency = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ZERO_TERMS_QUERY_FIELD)) {
String zeroTermsDocs = parser.text();
if ("none".equalsIgnoreCase(zeroTermsDocs)) {
zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE;
@ -635,7 +635,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
} else {
throw new ParsingException(parser.getTokenLocation(), "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -716,7 +716,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
multiMatchQuery.setMaxExpansions(maxExpansions);
multiMatchQuery.setOccur(operator.toBooleanClauseOccur());
if (fuzzyRewrite != null) {
multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), fuzzyRewrite, null));
multiMatchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), fuzzyRewrite, null));
}
if (tieBreaker != null) {
multiMatchQuery.setTieBreaker(tieBreaker);

View File

@ -173,23 +173,23 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
query = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
innerHitBuilder = InnerHitBuilder.fromXContent(parseContext);
} else {
throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, PATH_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, PATH_FIELD)) {
path = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
scoreMode = HasChildQueryBuilder.parseScoreMode(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[nested] query does not support [" + currentFieldName + "]");

View File

@ -130,15 +130,15 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQue
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
ignoreUnmapped = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");

View File

@ -177,10 +177,11 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
if (contentType == builder.contentType()) {
builder.rawField(DOCUMENT_FIELD.getPreferredName(), document);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(document);
parser.nextToken();
builder.field(DOCUMENT_FIELD.getPreferredName());
builder.copyCurrentStructure(parser);
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(document)) {
parser.nextToken();
builder.field(DOCUMENT_FIELD.getPreferredName());
builder.copyCurrentStructure(parser);
}
}
}
if (indexedDocumentIndex != null || indexedDocumentType != null || indexedDocumentId != null) {
@ -230,7 +231,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, DOCUMENT_FIELD)) {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser);
builder.flush();
@ -241,23 +242,23 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
"] query does not support [" + token + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_TYPE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, DOCUMENT_TYPE_FIELD)) {
documentType = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_INDEX)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_INDEX)) {
indexedDocumentIndex = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_TYPE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_TYPE)) {
indexedDocumentType = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ID)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ID)) {
indexedDocumentId = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ROUTING)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ROUTING)) {
indexedDocumentRouting = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_PREFERENCE)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_PREFERENCE)) {
indexedDocumentPreference = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_VERSION)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_VERSION)) {
indexedDocumentVersion = parser.longValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +

View File

@ -140,13 +140,13 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PREFIX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PREFIX_FIELD)) {
value = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -176,7 +176,7 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder<PrefixQueryBuilder>
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
Query query = null;
MappedFieldType fieldType = context.fieldMapper(fieldName);

View File

@ -19,52 +19,37 @@
package org.elasticsearch.index.query;
import java.io.IOException;
import java.util.Objects;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import java.io.IOException;
public class QueryParseContext {
public class QueryParseContext implements ParseFieldMatcherSupplier {
private static final ParseField CACHE = new ParseField("_cache").withAllDeprecated("Elasticsearch makes its own caching decisions");
private static final ParseField CACHE_KEY = new ParseField("_cache_key").withAllDeprecated("Filters are always used as cache keys");
private XContentParser parser;
private ParseFieldMatcher parseFieldMatcher = ParseFieldMatcher.EMPTY;
private final XContentParser parser;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final ParseFieldMatcher parseFieldMatcher;
private IndicesQueriesRegistry indicesQueriesRegistry;
public QueryParseContext(IndicesQueriesRegistry registry) {
this.indicesQueriesRegistry = registry;
}
public void reset(XContentParser jp) {
this.parseFieldMatcher = ParseFieldMatcher.EMPTY;
this.parser = jp;
if (parser != null) {
this.parser.setParseFieldMatcher(parseFieldMatcher);
}
public QueryParseContext(IndicesQueriesRegistry registry, XContentParser parser, ParseFieldMatcher parseFieldMatcher) {
this.indicesQueriesRegistry = Objects.requireNonNull(registry, "indices queries registry cannot be null");
this.parser = Objects.requireNonNull(parser, "parser cannot be null");
this.parseFieldMatcher = Objects.requireNonNull(parseFieldMatcher, "parse field matcher cannot be null");
}
public XContentParser parser() {
return this.parser;
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
if (parseFieldMatcher == null) {
throw new IllegalArgumentException("parseFieldMatcher must not be null");
}
if (parser != null) {
parser.setParseFieldMatcher(parseFieldMatcher);
}
this.parseFieldMatcher = parseFieldMatcher;
}
public boolean isDeprecatedSetting(String setting) {
return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY);
return this.parseFieldMatcher.match(setting, CACHE) || this.parseFieldMatcher.match(setting, CACHE_KEY);
}
/**
@ -120,7 +105,7 @@ public class QueryParseContext {
if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) {
throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object");
}
QueryBuilder<?> result = indicesQueriesRegistry.lookup(queryName, parser).fromXContent(this);
QueryBuilder<?> result = indicesQueriesRegistry.lookup(queryName, parser, parseFieldMatcher).fromXContent(this);
if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) {
// if we are at END_OBJECT, move to the next one...
parser.nextToken();
@ -128,11 +113,8 @@ public class QueryParseContext {
return result;
}
public ParseFieldMatcher parseFieldMatcher() {
@Override
public ParseFieldMatcher getParseFieldMatcher() {
return parseFieldMatcher;
}
public void parser(XContentParser innerParser) {
this.parser = innerParser;
}
}
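Editor's note: a minimal sketch (not part of this commit) of how a caller uses the reworked QueryParseContext above. Only the three-argument constructor and parseInnerQueryBuilder() are taken from the diff; the wrapper class, method, and variable names are illustrative assumptions.

import java.io.IOException;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;

// Hypothetical caller: all dependencies are handed over at construction time,
// replacing the removed reset(XContentParser)/parseFieldMatcher(...) setter phase.
class QueryParseContextUsageSketch {
    static QueryBuilder<?> parseQuery(IndicesQueriesRegistry registry, XContentParser parser,
                                      ParseFieldMatcher matcher) throws IOException {
        QueryParseContext context = new QueryParseContext(registry, parser, matcher);
        return context.parseInnerQueryBuilder();
    }
}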

View File

@ -20,6 +20,9 @@ package org.elasticsearch.index.query;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -28,12 +31,11 @@ import org.elasticsearch.script.ScriptService;
/**
* Context object used to rewrite {@link QueryBuilder} instances into simplified version.
*/
public class QueryRewriteContext {
public class QueryRewriteContext implements ParseFieldMatcherSupplier {
protected final MapperService mapperService;
protected final ScriptService scriptService;
protected final IndexSettings indexSettings;
protected final IndicesQueriesRegistry indicesQueriesRegistry;
protected final QueryParseContext parseContext;
protected final IndexReader reader;
public QueryRewriteContext(IndexSettings indexSettings, MapperService mapperService, ScriptService scriptService,
@ -42,7 +44,6 @@ public class QueryRewriteContext {
this.scriptService = scriptService;
this.indexSettings = indexSettings;
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.parseContext = new QueryParseContext(indicesQueriesRegistry);
this.reader = reader;
}
@ -80,12 +81,16 @@ public class QueryRewriteContext {
return reader;
}
@Override
public ParseFieldMatcher getParseFieldMatcher() {
return this.indexSettings.getParseFieldMatcher();
}
/**
* Returns a new {@link QueryParseContext} to parse template or wrapped queries.
* Returns a new {@link QueryParseContext} that wraps the provided parser, using the ParseFieldMatcher settings that
* are configured in the index settings
*/
public QueryParseContext newParseContext() {
QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
queryParseContext.parseFieldMatcher(parseContext.parseFieldMatcher());
return queryParseContext;
public QueryParseContext newParseContext(XContentParser parser) {
return new QueryParseContext(indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
}
}
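Editor's note: a similar hedged sketch for the QueryRewriteContext change above. newParseContext(XContentParser) and getParseFieldMatcher() come from this diff; the wrapper class and parameter names are assumptions, and the imports match the previous sketch plus org.elasticsearch.index.query.QueryRewriteContext.

// Sketch only: the caller supplies the parser; the ParseFieldMatcher is taken from the index settings.
class QueryRewriteContextUsageSketch {
    static QueryBuilder<?> parseWrapped(QueryRewriteContext rewriteContext, XContentParser parser) throws IOException {
        QueryParseContext parseContext = rewriteContext.newParseContext(parser);
        return parseContext.parseInnerQueryBuilder();
    }
}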

View File

@ -19,6 +19,14 @@
package org.elasticsearch.index.query;
import static java.util.Collections.unmodifiableMap;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
@ -28,7 +36,6 @@ import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
@ -49,7 +56,6 @@ import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import org.elasticsearch.index.query.support.InnerHitBuilder;
import org.elasticsearch.index.query.support.InnerHitsBuilder;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -58,14 +64,6 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
/**
* Context object used to create lucene queries on the shard level.
*/
@ -114,28 +112,14 @@ public class QueryShardContext extends QueryRewriteContext {
this.types = source.getTypes();
}
public void parseFieldMatcher(ParseFieldMatcher parseFieldMatcher) {
this.parseContext.parseFieldMatcher(parseFieldMatcher);
}
public ParseFieldMatcher parseFieldMatcher() {
return parseContext.parseFieldMatcher();
}
public void reset() {
allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.parseFieldMatcher(ParseFieldMatcher.EMPTY);
this.lookup = null;
this.namedQueries.clear();
this.nestedScope = new NestedScope();
this.isFilter = false;
}
public void reset(XContentParser jp) {
this.reset();
this.parseContext.reset(jp);
}
public AnalysisService getAnalysisService() {
return mapperService.analysisService();
}
@ -301,10 +285,6 @@ public class QueryShardContext extends QueryRewriteContext {
return indexSettings.getIndexVersionCreated();
}
public QueryParseContext parseContext() {
return this.parseContext;
}
public boolean matchesIndices(String... indices) {
for (String index : indices) {
if (indexSettings.matchesIndexName(index)) {
@ -343,38 +323,35 @@ public class QueryShardContext extends QueryRewriteContext {
*/
@Nullable
public ParsedQuery parseInnerFilter(XContentParser parser) throws IOException {
reset(parser);
reset();
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query filter = QueryBuilder.rewriteQuery(parseContext().parseInnerQueryBuilder(), this).toFilter(this);
Query filter = QueryBuilder.rewriteQuery(newParseContext(parser).parseInnerQueryBuilder(), this).toFilter(this);
if (filter == null) {
return null;
}
return new ParsedQuery(filter, copyNamedQueries());
} finally {
reset(null);
reset();
}
}
private ParsedQuery innerParse(XContentParser parser) throws IOException, QueryShardException {
reset(parser);
reset();
try {
parseFieldMatcher(indexSettings.getParseFieldMatcher());
Query query = parseInnerQuery();
Query query = parseInnerQuery(parser);
return new ParsedQuery(query, copyNamedQueries());
} finally {
reset(null);
reset();
}
}
public Query parseInnerQuery() throws IOException {
return toQuery(this.parseContext().parseInnerQueryBuilder(), this);
public Query parseInnerQuery(XContentParser parser) throws IOException {
return toQuery(this.newParseContext(parser).parseInnerQueryBuilder(), this);
}
public ParsedQuery toQuery(QueryBuilder<?> queryBuilder) {
reset();
parseFieldMatcher(indexSettings.getParseFieldMatcher());
try {
Query query = toQuery(queryBuilder, this);
return new ParsedQuery(query, copyNamedQueries());
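Editor's note: with reset(XContentParser) removed from QueryShardContext, the parser now travels as an argument. A hedged sketch of the two public entry points shown above; the wrapper class and variable names are illustrative, while parseInnerQuery and parseInnerFilter carry the signatures from this diff.

import java.io.IOException;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;

class QueryShardContextUsageSketch {
    static Query parseQuery(QueryShardContext shardContext, XContentParser parser) throws IOException {
        // The parser is passed explicitly; there is no reset(parser)/reset(null) bracket anymore.
        return shardContext.parseInnerQuery(parser);
    }

    static ParsedQuery parseFilter(QueryShardContext shardContext, XContentParser parser) throws IOException {
        // May return null when the parsed inner query produces no filter.
        return shardContext.parseInnerFilter(parser);
    }
}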

View File

@ -667,7 +667,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String fField = null;
float fBoost = AbstractQueryBuilder.DEFAULT_BOOST;
@ -691,65 +691,65 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
"] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
queryString = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DEFAULT_FIELD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DEFAULT_FIELD_FIELD)) {
defaultField = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
defaultOperator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, QUOTE_ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, QUOTE_ANALYZER_FIELD)) {
quoteAnalyzer = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ALLOW_LEADING_WILDCARD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ALLOW_LEADING_WILDCARD_FIELD)) {
allowLeadingWildcard = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AUTO_GENERATED_PHRASE_QUERIES_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AUTO_GENERATED_PHRASE_QUERIES_FIELD)) {
autoGeneratePhraseQueries = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_DETERMINED_STATES_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_DETERMINED_STATES_FIELD)) {
maxDeterminizedStates = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
lowercaseExpandedTerms = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ENABLE_POSITION_INCREMENTS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ENABLE_POSITION_INCREMENTS_FIELD)) {
enablePositionIncrements = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ESCAPE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ESCAPE_FIELD)) {
escape = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, USE_DIS_MAX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, USE_DIS_MAX_FIELD)) {
useDisMax = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_PREFIX_LENGTH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_PREFIX_LENGTH_FIELD)) {
fuzzyPrefixLength = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_MAX_EXPANSIONS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_MAX_EXPANSIONS_FIELD)) {
fuzzyMaxExpansions = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FUZZY_REWRITE_FIELD)) {
fuzzyRewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PHRASE_SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PHRASE_SLOP_FIELD)) {
phraseSlop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIE_BREAKER_FIELD)) {
tieBreaker = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
analyzeWildcard = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, QUOTE_FIELD_SUFFIX_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, QUOTE_FIELD_SUFFIX_FIELD)) {
quoteFieldSuffix = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
String localeStr = parser.text();
locale = Locale.forLanguageTag(localeStr);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TIME_ZONE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIME_ZONE_FIELD)) {
try {
timeZone = parser.text();
} catch (IllegalArgumentException e) {
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME +
"] time_zone [" + parser.text() + "] is unknown");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME +
@ -897,11 +897,11 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
qpSettings.fuzziness(fuzziness);
qpSettings.fuzzyPrefixLength(fuzzyPrefixLength);
qpSettings.fuzzyMaxExpansions(fuzzyMaxExpansions);
qpSettings.fuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), this.fuzzyRewrite));
qpSettings.fuzzyRewriteMethod(QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), this.fuzzyRewrite));
qpSettings.phraseSlop(phraseSlop);
qpSettings.useDisMax(useDisMax);
qpSettings.tieBreaker(tieBreaker);
qpSettings.rewriteMethod(QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), this.rewrite));
qpSettings.rewriteMethod(QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), this.rewrite));
qpSettings.lenient(lenient == null ? context.queryStringLenient() : lenient);
qpSettings.timeZone(timeZone);
qpSettings.maxDeterminizedStates(maxDeterminizedStates);

View File

@ -323,33 +323,33 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
from = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TO_FIELD)) {
to = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
includeLower = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
includeUpper = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, GT_FIELD)) {
from = parser.objectBytes();
includeLower = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
from = parser.objectBytes();
includeLower = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LT_FIELD)) {
to = parser.objectBytes();
includeUpper = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
to = parser.objectBytes();
includeUpper = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TIME_ZONE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, TIME_ZONE_FIELD)) {
timeZone = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FORMAT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FORMAT_FIELD)) {
format = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -358,9 +358,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) {
// ignore
} else {
throw new ParsingException(parser.getTokenLocation(), "[range] query does not support [" + currentFieldName + "]");

View File

@ -203,20 +203,20 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
String flags = parser.textOrNull();
flagsValue = RegexpFlag.resolveValue(flags);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_DETERMINIZED_STATES_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MAX_DETERMINIZED_STATES_FIELD)) {
maxDeterminizedStates = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FLAGS_VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FLAGS_VALUE_FIELD)) {
flagsValue = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -225,7 +225,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
}
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else {
fieldName = currentFieldName;
@ -252,7 +252,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
@Override
protected Query doToQuery(QueryShardContext context) throws QueryShardException, IOException {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
Query query = null;
MappedFieldType fieldType = context.fieldMapper(fieldName);

View File

@ -112,20 +112,20 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseContext.parseFieldMatcher());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseContext.getParseFieldMatcher());
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
// TODO remove in 3.0 (here to support old script APIs)
params = parser.map();
} else {
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.getParseFieldMatcher())) {
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
}
}

View File

@ -19,9 +19,9 @@
package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@ -29,13 +29,15 @@ import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.search.SynonymQuery;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.List;
import java.util.ArrayList;
/**
* Wrapper class for Lucene's SimpleQueryParser that allows us to redefine
@ -47,7 +49,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
private QueryShardContext context;
/** Creates a new parser with custom flags used to enable/disable certain features. */
public SimpleQueryParser(Analyzer analyzer, Map<String, Float> weights, int flags, Settings settings, QueryShardContext context) {
public SimpleQueryParser(Analyzer analyzer, Map<String, Float> weights, int flags,
Settings settings, QueryShardContext context) {
super(analyzer, weights, flags);
this.settings = settings;
this.context = context;
@ -167,62 +170,79 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
/**
* Analyze the given string using its analyzer, constructing either a
* {@code PrefixQuery} or a {@code BooleanQuery} made up
* of {@code PrefixQuery}s
* of {@code TermQuery}s and {@code PrefixQuery}s
*/
private Query newPossiblyAnalyzedQuery(String field, String termStr) {
List<List<String>> tlist = new ArrayList<> ();
// get Analyzer from superclass and tokenize the term
try (TokenStream source = getAnalyzer().tokenStream(field, termStr)) {
// Use the analyzer to get all the tokens, and then build a TermQuery,
// PhraseQuery, or nothing based on the term count
CachingTokenFilter buffer = new CachingTokenFilter(source);
buffer.reset();
source.reset();
List<String> currentPos = new ArrayList<>();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);
TermToBytesRefAttribute termAtt = null;
int numTokens = 0;
boolean hasMoreTokens = false;
termAtt = buffer.getAttribute(TermToBytesRefAttribute.class);
if (termAtt != null) {
try {
hasMoreTokens = buffer.incrementToken();
while (hasMoreTokens) {
numTokens++;
hasMoreTokens = buffer.incrementToken();
try {
boolean hasMoreTokens = source.incrementToken();
while (hasMoreTokens) {
if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {
tlist.add(currentPos);
currentPos = new ArrayList<>();
}
} catch (IOException e) {
// ignore
currentPos.add(termAtt.toString());
hasMoreTokens = source.incrementToken();
}
}
// rewind buffer
buffer.reset();
if (numTokens == 0) {
return null;
} else if (numTokens == 1) {
try {
boolean hasNext = buffer.incrementToken();
assert hasNext == true;
} catch (IOException e) {
// safe to ignore, because we know the number of tokens
if (currentPos.isEmpty() == false) {
tlist.add(currentPos);
}
return new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
} else {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
for (int i = 0; i < numTokens; i++) {
try {
boolean hasNext = buffer.incrementToken();
assert hasNext == true;
} catch (IOException e) {
// safe to ignore, because we know the number of tokens
}
bq.add(new BooleanClause(new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef()))), BooleanClause.Occur.SHOULD));
}
return bq.build();
} catch (IOException e) {
// ignore
// TODO: should we stop ignoring the exception and instead return a prefix query with the original term?
}
} catch (IOException e) {
// Bail on any exceptions, going with a regular prefix query
return new PrefixQuery(new Term(field, termStr));
}
if (tlist.size() == 0) {
return null;
}
if (tlist.size() == 1 && tlist.get(0).size() == 1) {
return new PrefixQuery(new Term(field, tlist.get(0).get(0)));
}
// build a boolean query with prefix on the last position only.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size()-1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = new PrefixQuery(new Term(field, plist.get(0)));
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)));
}
} else if (isLastPos == false) {
// build a synonym query for terms in the same position.
Term[] terms = new Term[plist.size()];
for (int i = 0; i < plist.size(); i++) {
terms[i] = new Term(field, plist.get(i));
}
posQuery = new SynonymQuery(terms);
} else {
BooleanQuery.Builder innerBuilder = new BooleanQuery.Builder();
for (String token : plist) {
innerBuilder.add(new BooleanClause(new PrefixQuery(new Term(field, token)),
BooleanClause.Occur.SHOULD));
}
posQuery = innerBuilder.setDisableCoord(true).build();
}
builder.add(new BooleanClause(posQuery, getDefaultOperator()));
}
return builder.build();
}
/**
* Class encapsulating the settings for the SimpleQueryString query, with
* their default values

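Editor's note: an illustrative sketch (not part of the commit) of the query shape the rewritten newPossiblyAnalyzedQuery builds when the analyzer emits two stacked tokens at the first position and a single final token, e.g. [[wi-fi, wifi], [network]] on a made-up field "body", assuming the default SHOULD operator: stacked positions become a SynonymQuery and only the last position keeps prefix semantics.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;

class PossiblyAnalyzedPrefixSketch {
    static Query expectedShape() {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        // Position 0 holds two stacked terms, so it is expressed as a SynonymQuery.
        builder.add(new BooleanClause(
                new SynonymQuery(new Term("body", "wi-fi"), new Term("body", "wifi")),
                BooleanClause.Occur.SHOULD));
        // The last position is the only one expanded as a prefix.
        builder.add(new BooleanClause(new PrefixQuery(new Term("body", "network")),
                BooleanClause.Occur.SHOULD));
        return builder.build();
    }
}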
View File

@ -439,7 +439,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String fField = null;
float fBoost = 1;
@ -463,15 +463,15 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
"] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
queryBody = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
analyzerName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
defaultOperator = Operator.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
// Possible options are:
// ALL, NONE, AND, OR, PREFIX, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE, FUZZY, NEAR, SLOP
@ -482,18 +482,18 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
flags = SimpleQueryStringFlag.ALL.value();
}
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
String localeStr = parser.text();
locale = Locale.forLanguageTag(localeStr);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
lowercaseExpandedTerms = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
lenient = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
analyzeWildcard = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
minimumShouldMatch = parser.textOrNull();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +

View File

@ -115,13 +115,13 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder<SpanContain
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, BIG_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, BIG_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder<?>)) {
throw new ParsingException(parser.getTokenLocation(), "span_containing [big] must be of type span query");
}
big = (SpanQueryBuilder<?>) query;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder<?>)) {
throw new ParsingException(parser.getTokenLocation(), "span_containing [little] must be of type span query");
@ -131,9 +131,9 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder<SpanContain
throw new ParsingException(parser.getTokenLocation(),
"[span_containing] query does not support [" + currentFieldName + "]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -116,7 +116,7 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder<SpanFirstQueryBu
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, MATCH_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MATCH_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
throw new ParsingException(parser.getTokenLocation(), "spanFirst [match] must be of type span query");
@ -126,11 +126,11 @@ public class SpanFirstQueryBuilder extends AbstractQueryBuilder<SpanFirstQueryBu
throw new ParsingException(parser.getTokenLocation(), "[span_first] query does not support [" + currentFieldName + "]");
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, END_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, END_FIELD)) {
end = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_first] query does not support [" + currentFieldName + "]");

View File

@ -93,7 +93,7 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder<SpanMultiTer
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, MATCH_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, MATCH_FIELD)) {
QueryBuilder innerQuery = parseContext.parseInnerQueryBuilder();
if (innerQuery instanceof MultiTermQueryBuilder == false) {
throw new ParsingException(parser.getTokenLocation(),
@ -104,9 +104,9 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder<SpanMultiTer
throw new ParsingException(parser.getTokenLocation(), "[span_multi] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_multi] query does not support [" + currentFieldName + "]");

View File

@ -158,7 +158,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, CLAUSES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, CLAUSES_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
@ -170,15 +170,15 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
throw new ParsingException(parser.getTokenLocation(), "[span_near] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, IN_ORDER_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, IN_ORDER_FIELD)) {
inOrder = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COLLECT_PAYLOADS_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, COLLECT_PAYLOADS_FIELD)) {
// Deprecated in 3.0.0
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
slop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_near] query does not support [" + currentFieldName + "]");

View File

@ -182,13 +182,13 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder<SpanNotQueryBuilde
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
throw new ParsingException(parser.getTokenLocation(), "spanNot [include] must be of type span query");
}
include = (SpanQueryBuilder) query;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, EXCLUDE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, EXCLUDE_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
throw new ParsingException(parser.getTokenLocation(), "spanNot [exclude] must be of type span query");
@ -198,15 +198,15 @@ public class SpanNotQueryBuilder extends AbstractQueryBuilder<SpanNotQueryBuilde
throw new ParsingException(parser.getTokenLocation(), "[span_not] query does not support [" + currentFieldName + "]");
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, DIST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, DIST_FIELD)) {
dist = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PRE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, PRE_FIELD)) {
pre = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, POST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, POST_FIELD)) {
post = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_not] query does not support [" + currentFieldName + "]");

View File

@ -109,7 +109,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, CLAUSES_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, CLAUSES_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (!(query instanceof SpanQueryBuilder)) {
@ -121,9 +121,9 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
throw new ParsingException(parser.getTokenLocation(), "[span_or] query does not support [" + currentFieldName + "]");
}
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_or] query does not support [" + currentFieldName + "]");

View File

@ -115,13 +115,13 @@ public class SpanTermQueryBuilder extends BaseTermQueryBuilder<SpanTermQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BaseTermQueryBuilder.VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, BaseTermQueryBuilder.VALUE_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -121,13 +121,13 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder<SpanWithinQuery
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseContext.parseFieldMatcher().match(currentFieldName, BIG_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, BIG_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (query instanceof SpanQueryBuilder == false) {
throw new ParsingException(parser.getTokenLocation(), "span_within [big] must be of type span query");
}
big = (SpanQueryBuilder) query;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, LITTLE_FIELD)) {
QueryBuilder query = parseContext.parseInnerQueryBuilder();
if (query instanceof SpanQueryBuilder == false) {
throw new ParsingException(parser.getTokenLocation(), "span_within [little] must be of type span query");
@ -137,9 +137,9 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder<SpanWithinQuery
throw new ParsingException(parser.getTokenLocation(),
"[span_within] query does not support [" + currentFieldName + "]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[span_within] query does not support [" + currentFieldName + "]");

View File

@ -122,7 +122,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
*/
public static TemplateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
Template template = parse(parser, parseContext.parseFieldMatcher());
Template template = parse(parser, parseContext.getParseFieldMatcher());
return new TemplateQueryBuilder(template);
}
@ -177,9 +177,8 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
ExecutableScript executable = queryRewriteContext.getScriptService().executable(template,
ScriptContext.Standard.SEARCH, Collections.emptyMap());
BytesReference querySource = (BytesReference) executable.run();
final QueryParseContext queryParseContext = queryRewriteContext.newParseContext();
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
queryParseContext.reset(qSourceParser);
final QueryParseContext queryParseContext = queryRewriteContext.newParseContext(qSourceParser);
final QueryBuilder<?> queryBuilder = queryParseContext.parseInnerQueryBuilder();
if (boost() != DEFAULT_BOOST || queryName() != null) {
final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();

View File

@ -109,13 +109,13 @@ public class TermQueryBuilder extends BaseTermQueryBuilder<TermQueryBuilder> {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, TERM_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.objectBytes();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -271,9 +271,9 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
fieldName = currentFieldName;
termsLookup = TermsLookup.parseTermsLookup(parser);
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -94,11 +94,11 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
type = parser.utf8Bytes();
} else {
throw new ParsingException(parser.getTokenLocation(),

View File

@ -155,15 +155,15 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, WILDCARD_FIELD)) {
if (parseContext.getParseFieldMatcher().match(currentFieldName, WILDCARD_FIELD)) {
value = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
value = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
rewrite = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
} else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -198,7 +198,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
}
WildcardQuery query = new WildcardQuery(term);
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.parseFieldMatcher(), rewrite, null);
MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
}
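The final WildcardQueryBuilder hunk shows the same rename on the query-building side: QueryParsers.parseRewriteMethod now receives context.getParseFieldMatcher(). For reference, a short sketch of how the Lucene query is assembled once parsing has produced the field name, pattern and optional rewrite string; the Term construction sits above the hunk and is not visible in this diff, so fieldName and value are assumed local variables.

// --- illustrative sketch (not part of the diff) ---
// Build the Lucene wildcard query and apply the parsed rewrite method, if any.
Term term = new Term(fieldName, value);   // fieldName and value are assumptions
WildcardQuery query = new WildcardQuery(term);
MultiTermQuery.RewriteMethod rewriteMethod =
        QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
// --- end sketch ---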

Some files were not shown because too many files have changed in this diff.