Merge branch 'master' into java9

Ryan Ernst 2016-05-21 14:19:58 -07:00
commit 37d36f2f4c
554 changed files with 15878 additions and 9825 deletions

View File

@@ -201,7 +201,7 @@ gradle test -Dtests.timeoutSuite=5000! ...
 Change the logging level of ES (not gradle)
 --------------------------------
-gradle test -Des.logger.level=DEBUG
+gradle test -Dtests.logger.level=DEBUG
 --------------------------------
 Print all the logging output from the test runs to the commandline

View File

@@ -456,7 +456,7 @@ class BuildPlugin implements Plugin<Project> {
       // default test sysprop values
       systemProperty 'tests.ifNoTests', 'fail'
       // TODO: remove setting logging level via system property
-      systemProperty 'es.logger.level', 'WARN'
+      systemProperty 'tests.logger.level', 'WARN'
       for (Map.Entry<String, String> property : System.properties.entrySet()) {
         if (property.getKey().startsWith('tests.') ||
             property.getKey().startsWith('es.')) {

View File

@@ -129,7 +129,11 @@ class NodeInfo {
         }
         env = [ 'JAVA_HOME' : project.javaHome ]
-        args.addAll("-E", "es.node.portsfile=true")
+        args.addAll("-E", "node.portsfile=true")
+        String loggerLevel = System.getProperty("tests.logger.level")
+        if (loggerLevel != null) {
+            args.addAll("-E", "logger.level=${loggerLevel}")
+        }
         String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
         String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
         env.put('ES_JAVA_OPTS', esJavaOpts)
@@ -140,7 +144,7 @@ class NodeInfo {
             }
         }
         env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options'))
-        args.addAll("-E", "es.path.conf=${confDir}")
+        args.addAll("-E", "path.conf=${confDir}")
         if (Os.isFamily(Os.FAMILY_WINDOWS)) {
             args.add('"') // end the entire command, quoted
         }

View File

@@ -1335,7 +1335,6 @@
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]junit[/\\]listeners[/\\]LoggingListener.java" checks="LineLength" />
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]ESRestTestCase.java" checks="LineLength" />
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]RestTestExecutionContext.java" checks="LineLength" />
-  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]client[/\\]RestClient.java" checks="LineLength" />
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]client[/\\]http[/\\]HttpRequestBuilder.java" checks="LineLength" />
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]json[/\\]JsonPath.java" checks="LineLength" />
   <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]parser[/\\]GreaterThanEqualToParser.java" checks="LineLength" />

View File

@@ -13,9 +13,7 @@ jna = 4.1.0
 # test dependencies
 randomizedrunner = 2.3.2
 junit = 4.11
-# TODO: Upgrade httpclient to a version > 4.5.1 once released. Then remove o.e.test.rest.client.StrictHostnameVerifier* and use
-# DefaultHostnameVerifier instead since we no longer need to workaround https://issues.apache.org/jira/browse/HTTPCLIENT-1698
-httpclient = 4.3.6
-httpcore = 4.3.3
+httpclient = 4.5.2
+httpcore = 4.4.4
 commonslogging = 1.1.3
 commonscodec = 1.10

View File

@@ -250,7 +250,7 @@ public class TransportClusterAllocationExplainAction
                                    final ActionListener<ClusterAllocationExplainResponse> listener) {
         final RoutingNodes routingNodes = state.getRoutingNodes();
         final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, state,
-                clusterInfoService.getClusterInfo(), System.nanoTime());
+                clusterInfoService.getClusterInfo(), System.nanoTime(), false);
         ShardRouting foundShard = null;
         if (request.useAnyUnassignedShard()) {

View File

@@ -38,9 +38,10 @@ import java.io.IOException;
  * Request to submit cluster reroute allocation commands
  */
 public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteRequest> {
-    AllocationCommands commands = new AllocationCommands();
-    boolean dryRun;
-    boolean explain;
+    private AllocationCommands commands = new AllocationCommands();
+    private boolean dryRun;
+    private boolean explain;
+    private boolean retryFailed;

     public ClusterRerouteRequest() {
     }
@@ -81,6 +82,15 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
         return this;
     }

+    /**
+     * Sets the retry failed flag (defaults to <tt>false</tt>). If true, the
+     * request will retry allocating shards that can't currently be allocated due to too many allocation failures.
+     */
+    public ClusterRerouteRequest setRetryFailed(boolean retryFailed) {
+        this.retryFailed = retryFailed;
+        return this;
+    }
+
     /**
      * Returns the current explain flag
      */
@@ -88,6 +98,14 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
         return this.explain;
     }

+    /**
+     * Returns the current retry failed flag
+     */
+    public boolean isRetryFailed() {
+        return this.retryFailed;
+    }
+
     /**
      * Set the allocation commands to execute.
      */
@@ -96,6 +114,13 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
         return this;
     }

+    /**
+     * Returns the allocation commands to execute
+     */
+    public AllocationCommands getCommands() {
+        return commands;
+    }
+
     /**
      * Sets the source for the request.
      */
@@ -136,6 +161,7 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
         commands = AllocationCommands.readFrom(in);
         dryRun = in.readBoolean();
         explain = in.readBoolean();
+        retryFailed = in.readBoolean();
         readTimeout(in);
     }

@@ -145,6 +171,7 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
         AllocationCommands.writeTo(commands, out);
         out.writeBoolean(dryRun);
         out.writeBoolean(explain);
+        out.writeBoolean(retryFailed);
         writeTimeout(out);
     }
 }

View File

@@ -60,6 +60,15 @@ public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder<Clu
         return this;
     }

+    /**
+     * Sets the retry failed flag (defaults to <tt>false</tt>). If true, the
+     * request will retry allocating shards that can't currently be allocated due to too many allocation failures.
+     */
+    public ClusterRerouteRequestBuilder setRetryFailed(boolean retryFailed) {
+        request.setRetryFailed(retryFailed);
+        return this;
+    }
+
     /**
      * Sets the commands for the request to execute.
      */
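
For context, a minimal sketch of how the new flag might be used from the Java client API, assuming a connected Client instance (the helper class and its name are hypothetical):

    import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
    import org.elasticsearch.client.Client;

    final class RerouteRetryExample {
        // Asks the master to re-attempt shards that exhausted their allocation
        // retries, via the setRetryFailed(...) builder method added above.
        static ClusterRerouteResponse retryFailedAllocations(Client client) {
            return client.admin().cluster()
                    .prepareReroute()
                    .setRetryFailed(true)
                    .get();
        }
    }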

View File

@@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -68,11 +69,28 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction<Clu
     @Override
     protected void masterOperation(final ClusterRerouteRequest request, final ClusterState state, final ActionListener<ClusterRerouteResponse> listener) {
-        clusterService.submitStateUpdateTask("cluster_reroute (api)", new AckedClusterStateUpdateTask<ClusterRerouteResponse>(Priority.IMMEDIATE, request, listener) {
+        clusterService.submitStateUpdateTask("cluster_reroute (api)", new ClusterRerouteResponseAckedClusterStateUpdateTask(logger,
+            allocationService, request, listener));
+    }
+
+    static class ClusterRerouteResponseAckedClusterStateUpdateTask extends AckedClusterStateUpdateTask<ClusterRerouteResponse> {
+
+        private final ClusterRerouteRequest request;
+        private final ActionListener<ClusterRerouteResponse> listener;
+        private final ESLogger logger;
+        private final AllocationService allocationService;
         private volatile ClusterState clusterStateToSend;
         private volatile RoutingExplanations explanations;

+        ClusterRerouteResponseAckedClusterStateUpdateTask(ESLogger logger, AllocationService allocationService, ClusterRerouteRequest request,
+                                                          ActionListener<ClusterRerouteResponse> listener) {
+            super(Priority.IMMEDIATE, request, listener);
+            this.request = request;
+            this.listener = listener;
+            this.logger = logger;
+            this.allocationService = allocationService;
+        }
+
         @Override
         protected ClusterRerouteResponse newResponse(boolean acknowledged) {
             return new ClusterRerouteResponse(acknowledged, clusterStateToSend, explanations);
@@ -91,15 +109,15 @@ public class TransportClusterRerouteAction extends TransportMasterNodeAction<Clu
         @Override
         public ClusterState execute(ClusterState currentState) {
-            RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.commands, request.explain());
+            RoutingAllocation.Result routingResult = allocationService.reroute(currentState, request.getCommands(), request.explain(),
+                request.isRetryFailed());
             ClusterState newState = ClusterState.builder(currentState).routingResult(routingResult).build();
             clusterStateToSend = newState;
             explanations = routingResult.explanations();
-            if (request.dryRun) {
+            if (request.dryRun()) {
                 return currentState;
             }
             return newState;
         }
-        });
     }
 }

View File

@@ -81,18 +81,13 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
         return snapshotInfo.status();
     }

-    static final class Fields {
-        static final String SNAPSHOT = "snapshot";
-        static final String ACCEPTED = "accepted";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (snapshotInfo != null) {
-            builder.field(Fields.SNAPSHOT);
+            builder.field("snapshot");
             snapshotInfo.toExternalXContent(builder, params);
         } else {
-            builder.field(Fields.ACCEPTED, true);
+            builder.field("accepted", true);
         }
         return builder;
     }

View File

@@ -74,13 +74,9 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent {
         }
     }

-    static final class Fields {
-        static final String SNAPSHOTS = "snapshots";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startArray(Fields.SNAPSHOTS);
+        builder.startArray("snapshots");
         for (SnapshotInfo snapshotInfo : snapshots) {
             snapshotInfo.toExternalXContent(builder, params);
         }

View File

@@ -73,18 +73,13 @@ public class RestoreSnapshotResponse extends ActionResponse implements ToXConten
         return restoreInfo.status();
     }

-    static final class Fields {
-        static final String SNAPSHOT = "snapshot";
-        static final String ACCEPTED = "accepted";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
         if (restoreInfo != null) {
-            builder.field(Fields.SNAPSHOT);
+            builder.field("snapshot");
             restoreInfo.toXContent(builder, params);
         } else {
-            builder.field(Fields.ACCEPTED, true);
+            builder.field("accepted", true);
         }
         return builder;
     }

View File

@@ -73,13 +73,9 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten
         }
     }

-    static final class Fields {
-        static final String SNAPSHOTS = "snapshots";
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startArray(Fields.SNAPSHOTS);
+        builder.startArray("snapshots");
         for (SnapshotStatus snapshot : snapshots) {
             snapshot.toXContent(builder, params);
         }

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
 import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
-import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
 import org.elasticsearch.index.shard.DocsStats;
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.search.suggest.completion.CompletionStats;
@@ -45,7 +44,6 @@ public class ClusterStatsIndices implements ToXContent {
     private QueryCacheStats queryCache;
     private CompletionStats completion;
     private SegmentsStats segments;
-    private PercolatorQueryCacheStats percolatorCache;

     public ClusterStatsIndices(List<ClusterStatsNodeResponse> nodeResponses) {
         ObjectObjectHashMap<String, ShardStats> countsPerIndex = new ObjectObjectHashMap<>();
@@ -56,7 +54,6 @@ public class ClusterStatsIndices implements ToXContent {
         this.queryCache = new QueryCacheStats();
         this.completion = new CompletionStats();
         this.segments = new SegmentsStats();
-        this.percolatorCache = new PercolatorQueryCacheStats();

         for (ClusterStatsNodeResponse r : nodeResponses) {
             for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) {
@@ -79,7 +76,6 @@ public class ClusterStatsIndices implements ToXContent {
                 queryCache.add(shardCommonStats.queryCache);
                 completion.add(shardCommonStats.completion);
                 segments.add(shardCommonStats.segments);
-                percolatorCache.add(shardCommonStats.percolatorCache);
             }
         }
@@ -122,10 +118,6 @@ public class ClusterStatsIndices implements ToXContent {
         return segments;
     }

-    public PercolatorQueryCacheStats getPercolatorCache() {
-        return percolatorCache;
-    }
-
     static final class Fields {
         static final String COUNT = "count";
     }
@@ -140,7 +132,6 @@ public class ClusterStatsIndices implements ToXContent {
         queryCache.toXContent(builder, params);
         completion.toXContent(builder, params);
         segments.toXContent(builder, params);
-        percolatorCache.toXContent(builder, params);
         return builder;
     }

View File

@@ -55,8 +55,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
         TransportClusterStatsAction.ClusterStatsNodeRequest, ClusterStatsNodeResponse> {

     private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store,
-        CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments,
-        CommonStatsFlags.Flag.PercolatorCache);
+        CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments);

     private final NodeService nodeService;
     private final IndicesService indicesService;
@@ -100,7 +99,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
             for (IndexShard indexShard : indexService) {
                 if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) {
                     // only report on fully started shards
-                    shardsStats.add(new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, SHARD_STATS_FLAGS), indexShard.commitStats()));
+                    shardsStats.add(new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, SHARD_STATS_FLAGS), indexShard.commitStats()));
                 }
             }
         }

View File

@@ -32,10 +32,8 @@ import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
 import org.elasticsearch.index.flush.FlushStats;
 import org.elasticsearch.index.get.GetStats;
-import org.elasticsearch.index.percolator.PercolatorQueryCache;
 import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.merge.MergeStats;
-import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
 import org.elasticsearch.index.search.stats.SearchStats;
@@ -101,9 +99,6 @@ public class CommonStats implements Streamable, ToXContent {
             case Segments:
                 segments = new SegmentsStats();
                 break;
-            case PercolatorCache:
-                percolatorCache = new PercolatorQueryCacheStats();
-                break;
             case Translog:
                 translog = new TranslogStats();
                 break;
@@ -123,8 +118,7 @@ public class CommonStats implements Streamable, ToXContent {
     }

-    public CommonStats(IndicesQueryCache indicesQueryCache, PercolatorQueryCache percolatorQueryCache,
-                       IndexShard indexShard, CommonStatsFlags flags) {
+    public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) {
         CommonStatsFlags.Flag[] setFlags = flags.getFlags();
@@ -169,9 +163,6 @@ public class CommonStats implements Streamable, ToXContent {
             case Segments:
                 segments = indexShard.segmentStats(flags.includeSegmentFileSizes());
                 break;
-            case PercolatorCache:
-                percolatorCache = percolatorQueryCache.getStats(indexShard.shardId());
-                break;
             case Translog:
                 translog = indexShard.translogStats();
                 break;
@@ -223,9 +214,6 @@ public class CommonStats implements Streamable, ToXContent {
     @Nullable
     public FieldDataStats fieldData;

-    @Nullable
-    public PercolatorQueryCacheStats percolatorCache;
-
     @Nullable
     public CompletionStats completion;
@@ -331,14 +319,6 @@ public class CommonStats implements Streamable, ToXContent {
         } else {
             fieldData.add(stats.getFieldData());
         }
-        if (percolatorCache == null) {
-            if (stats.getPercolatorCache() != null) {
-                percolatorCache = new PercolatorQueryCacheStats();
-                percolatorCache.add(stats.getPercolatorCache());
-            }
-        } else {
-            percolatorCache.add(stats.getPercolatorCache());
-        }
         if (completion == null) {
             if (stats.getCompletion() != null) {
                 completion = new CompletionStats();
@@ -436,11 +416,6 @@ public class CommonStats implements Streamable, ToXContent {
         return this.fieldData;
     }

-    @Nullable
-    public PercolatorQueryCacheStats getPercolatorCache() {
-        return percolatorCache;
-    }
-
     @Nullable
     public CompletionStats getCompletion() {
         return completion;
@@ -528,9 +503,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (in.readBoolean()) {
             fieldData = FieldDataStats.readFieldDataStats(in);
         }
-        if (in.readBoolean()) {
-            percolatorCache = PercolatorQueryCacheStats.readPercolateStats(in);
-        }
         if (in.readBoolean()) {
             completion = CompletionStats.readCompletionStats(in);
         }
@@ -610,12 +582,6 @@ public class CommonStats implements Streamable, ToXContent {
             out.writeBoolean(true);
             fieldData.writeTo(out);
         }
-        if (percolatorCache == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            percolatorCache.writeTo(out);
-        }
         if (completion == null) {
             out.writeBoolean(false);
         } else {
@@ -669,9 +635,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (fieldData != null) {
             fieldData.toXContent(builder, params);
         }
-        if (percolatorCache != null) {
-            percolatorCache.toXContent(builder, params);
-        }
         if (completion != null) {
             completion.toXContent(builder, params);
         }

View File

@@ -240,7 +240,6 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         FieldData("fielddata"),
         Docs("docs"),
         Warmer("warmer"),
-        PercolatorCache("percolator_cache"),
         Completion("completion"),
         Segments("segments"),
         Translog("translog"),

View File

@@ -184,15 +184,6 @@ public class IndicesStatsRequest extends BroadcastRequest<IndicesStatsRequest> {
         return flags.isSet(Flag.FieldData);
     }

-    public IndicesStatsRequest percolate(boolean percolate) {
-        flags.set(Flag.PercolatorCache, percolate);
-        return this;
-    }
-
-    public boolean percolate() {
-        return flags.isSet(Flag.PercolatorCache);
-    }
-
     public IndicesStatsRequest segments(boolean segments) {
         flags.set(Flag.Segments, segments);
         return this;

View File

@@ -127,11 +127,6 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder
         return this;
     }

-    public IndicesStatsRequestBuilder setPercolate(boolean percolate) {
-        request.percolate(percolate);
-        return this;
-    }
-
     public IndicesStatsRequestBuilder setSegments(boolean segments) {
         request.segments(segments);
         return this;

View File

@@ -139,9 +139,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.set(CommonStatsFlags.Flag.FieldData);
             flags.fieldDataFields(request.fieldDataFields());
         }
-        if (request.percolate()) {
-            flags.set(CommonStatsFlags.Flag.PercolatorCache);
-        }
         if (request.segments()) {
             flags.set(CommonStatsFlags.Flag.Segments);
             flags.includeSegmentFileSizes(request.includeSegmentFileSizes());
@@ -163,6 +160,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.set(CommonStatsFlags.Flag.Recovery);
         }

-        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats());
+        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats());
     }
 }
} }

View File

@@ -27,7 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -165,9 +165,9 @@ public class PercolateRequestBuilder extends ActionRequestBuilder<PercolateReque
     /**
      * Delegates to
-     * {@link PercolateSourceBuilder#addAggregation(AggregatorBuilder)}
+     * {@link PercolateSourceBuilder#addAggregation(AggregationBuilder)}
      */
-    public PercolateRequestBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+    public PercolateRequestBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
         sourceBuilder().addAggregation(aggregationBuilder);
         return this;
     }

View File

@@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -53,7 +53,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     private List<SortBuilder<?>> sorts;
     private Boolean trackScores;
     private HighlightBuilder highlightBuilder;
-    private List<AggregatorBuilder<?>> aggregationBuilders;
+    private List<AggregationBuilder<?>> aggregationBuilders;
     private List<PipelineAggregatorBuilder<?>> pipelineAggregationBuilders;

     /**
@@ -126,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     /**
      * Add an aggregation definition.
      */
-    public PercolateSourceBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+    public PercolateSourceBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
         if (aggregationBuilders == null) {
             aggregationBuilders = new ArrayList<>();
         }
@@ -175,7 +175,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
             builder.field("aggregations");
             builder.startObject();
             if (aggregationBuilders != null) {
-                for (AggregatorBuilder<?> aggregation : aggregationBuilders) {
+                for (AggregationBuilder<?> aggregation : aggregationBuilders) {
                     aggregation.toXContent(builder, params);
                 }
             }

View File

@@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Template;
 import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -373,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
     /**
      * Adds an aggregation to the search operation.
      */
-    public SearchRequestBuilder addAggregation(AggregatorBuilder<?> aggregation) {
+    public SearchRequestBuilder addAggregation(AggregationBuilder<?> aggregation) {
        sourceBuilder().aggregation(aggregation);
        return this;
    }
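
A short usage sketch of the renamed type, assuming a connected Client and hypothetical index and field names:

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.search.aggregations.AggregationBuilders;

    final class AggregationRenameExample {
        // addAggregation(...) now accepts an AggregationBuilder<?> instead of
        // the former AggregatorBuilder<?>; the factory methods are unchanged.
        static SearchResponse termsByTag(Client client) {
            return client.prepareSearch("my-index")
                    .addAggregation(AggregationBuilders.terms("by_tag").field("tag"))
                    .get();
        }
    }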

View File

@@ -177,15 +177,7 @@ final class Bootstrap {
         // install SM after natives, shutdown hooks, etc.
         Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));

-        // We do not need to reload system properties here as we have already applied them in building the settings and
-        // reloading could cause multiple prompts to the user for values if a system property was specified with a prompt
-        // placeholder
-        Settings nodeSettings = Settings.builder()
-            .put(settings)
-            .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
-            .build();
-
-        node = new Node(nodeSettings) {
+        node = new Node(settings) {
             @Override
             protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) {
                 BootstrapCheck.check(settings, boundTransportAddress);
@@ -193,13 +185,13 @@ final class Bootstrap {
         };
     }

-    private static Environment initialSettings(boolean foreground, String pidFile) {
+    private static Environment initialSettings(boolean foreground, String pidFile, Map<String, String> esSettings) {
         Terminal terminal = foreground ? Terminal.DEFAULT : null;
         Settings.Builder builder = Settings.builder();
         if (Strings.hasLength(pidFile)) {
             builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile);
         }
-        return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal);
+        return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings);
     }

     private void start() {
@@ -233,11 +225,13 @@ final class Bootstrap {
         // Set the system property before anything has a chance to trigger its use
         initLoggerPrefix();

-        elasticsearchSettings(esSettings);
+        // force the class initializer for BootstrapInfo to run before
+        // the security manager is installed
+        BootstrapInfo.init();

         INSTANCE = new Bootstrap();

-        Environment environment = initialSettings(foreground, pidFile);
+        Environment environment = initialSettings(foreground, pidFile, esSettings);
         Settings settings = environment.settings();
         LogConfigurator.configure(settings, true);
         checkForCustomConfFile();
@@ -295,13 +289,6 @@ final class Bootstrap {
         }
     }

-    @SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
-    private static void elasticsearchSettings(Map<String, String> esSettings) {
-        for (Map.Entry<String, String> esSetting : esSettings.entrySet()) {
-            System.setProperty(esSetting.getKey(), esSetting.getValue());
-        }
-    }
-
     @SuppressForbidden(reason = "System#out")
     private static void closeSystOut() {
         System.out.close();

View File

@@ -120,4 +120,8 @@ public final class BootstrapInfo {
         }
         return SYSTEM_PROPERTIES;
     }
+
+    public static void init() {
+    }
+
 }
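
The empty init() method exists only to force BootstrapInfo's class initializer to run at a controlled point, before the security manager is installed. A self-contained sketch of the idiom, with hypothetical names:

    final class EagerInit {
        // Static state is initialized exactly once, when the class is first touched.
        private static final long LOADED_AT_NANOS = System.nanoTime();

        // No-op whose only purpose is to trigger the static initializer early,
        // so the work above happens before later restrictions take effect.
        static void init() {
        }
    }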

View File

@@ -21,28 +21,25 @@ package org.elasticsearch.bootstrap;

 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
-import joptsimple.util.KeyValuePair;
 import org.elasticsearch.Build;
-import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.SettingCommand;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.cli.UserError;
 import org.elasticsearch.monitor.jvm.JvmInfo;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Map;

 /**
  * This class starts elasticsearch.
  */
-class Elasticsearch extends Command {
+class Elasticsearch extends SettingCommand {

     private final OptionSpec<Void> versionOption;
     private final OptionSpec<Void> daemonizeOption;
     private final OptionSpec<String> pidfileOption;
-    private final OptionSpec<KeyValuePair> propertyOption;

     // visible for testing
     Elasticsearch() {
@@ -56,7 +53,6 @@ class Elasticsearch extends Command {
         pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"),
             "Creates a pid file in the specified path on start")
             .withRequiredArg();
-        propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class);
     }

     /**
@@ -75,7 +71,7 @@ class Elasticsearch extends Command {
     }

     @Override
-    protected void execute(Terminal terminal, OptionSet options) throws Exception {
+    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
         if (options.nonOptionArguments().isEmpty() == false) {
             throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments());
         }
@@ -92,18 +88,7 @@ class Elasticsearch extends Command {
         final boolean daemonize = options.has(daemonizeOption);
         final String pidFile = pidfileOption.value(options);

-        final Map<String, String> esSettings = new HashMap<>();
-        for (final KeyValuePair kvp : propertyOption.values(options)) {
-            if (!kvp.key.startsWith("es.")) {
-                throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]");
-            }
-            if (kvp.value.isEmpty()) {
-                throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty");
-            }
-            esSettings.put(kvp.key, kvp.value);
-        }
-
-        init(daemonize, pidFile, esSettings);
+        init(daemonize, pidFile, settings);
     }

     void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) {

View File

@@ -19,15 +19,15 @@

 package org.elasticsearch.cli;

+import java.io.IOException;
+import java.util.Arrays;
+
 import joptsimple.OptionException;
 import joptsimple.OptionParser;
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.common.SuppressForbidden;

-import java.io.IOException;
-import java.util.Arrays;
-
 /**
  * An action to execute within a cli.
  */
@@ -112,4 +112,5 @@ public abstract class Command {
      *
      * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */
     protected abstract void execute(Terminal terminal, OptionSet options) throws Exception;
+
 }

View File

@@ -0,0 +1,77 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cli;
+
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+import joptsimple.util.KeyValuePair;
+
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+public abstract class SettingCommand extends Command {
+
+    private final OptionSpec<KeyValuePair> settingOption;
+
+    public SettingCommand(String description) {
+        super(description);
+        this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class);
+    }
+
+    @Override
+    protected void execute(Terminal terminal, OptionSet options) throws Exception {
+        final Map<String, String> settings = new HashMap<>();
+        for (final KeyValuePair kvp : settingOption.values(options)) {
+            if (kvp.value.isEmpty()) {
+                throw new UserError(ExitCodes.USAGE, "Setting [" + kvp.key + "] must not be empty");
+            }
+            settings.put(kvp.key, kvp.value);
+        }
+
+        putSystemPropertyIfSettingIsMissing(settings, "path.conf", "es.path.conf");
+        putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data");
+        putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home");
+        putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs");
+
+        execute(terminal, options, settings);
+    }
+
+    protected static void putSystemPropertyIfSettingIsMissing(final Map<String, String> settings, final String setting, final String key) {
+        final String value = System.getProperty(key);
+        if (value != null) {
+            if (settings.containsKey(setting)) {
+                final String message =
+                    String.format(
+                        Locale.ROOT,
+                        "duplicate setting [%s] found via command-line [%s] and system property [%s]",
+                        setting,
+                        settings.get(setting),
+                        value);
+                throw new IllegalArgumentException(message);
+            } else {
+                settings.put(setting, value);
+            }
+        }
+    }
+
+    protected abstract void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception;
+
+}
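
A minimal sketch of a command built on this new base class; the subclass, its description, and its output are hypothetical:

    // Assumes the same org.elasticsearch.cli package as SettingCommand.
    class PrintPathHomeCommand extends SettingCommand {

        PrintPathHomeCommand() {
            super("Prints the resolved path.home setting");
        }

        @Override
        protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) {
            // "settings" holds the parsed -E key=value pairs, plus any es.path.*
            // system properties folded in by putSystemPropertyIfSettingIsMissing.
            terminal.println("path.home = " + settings.get("path.home"));
        }
    }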

View File

@@ -49,6 +49,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDeci
 import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.RebalanceOnlyWhenActiveAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider;
+import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
@@ -79,6 +80,7 @@ public class ClusterModule extends AbstractModule {
         new Setting<>("cluster.routing.allocation.type", BALANCED_ALLOCATOR, Function.identity(), Property.NodeScope);
     public static final List<Class<? extends AllocationDecider>> DEFAULT_ALLOCATION_DECIDERS =
         Collections.unmodifiableList(Arrays.asList(
+            MaxRetryAllocationDecider.class,
            SameShardAllocationDecider.class,
            FilterAllocationDecider.class,
            ReplicaAfterPrimaryActiveAllocationDecider.class,
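
The decider itself is not shown in this excerpt; a self-contained sketch of the rule it enforces (not the actual MaxRetryAllocationDecider, and the retry limit here is hypothetical): a shard that has failed allocation too many times stays unassigned until a reroute with retry_failed=true is issued.

    final class MaxRetryRuleSketch {
        private static final int MAX_RETRIES = 5; // hypothetical limit

        // Mirrors the commit's semantics: an explicit retry_failed reroute
        // overrides the cap on UnassignedInfo.getNumFailedAllocations().
        static boolean canAllocate(int failedAllocations, boolean retryFailed) {
            return retryFailed || failedAllocations < MAX_RETRIES;
        }
    }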

View File

@@ -281,8 +281,11 @@ public class MetaDataMappingService extends AbstractComponent {
             // Also the order of the mappings may be backwards.
             if (newMapper.parentFieldMapper().active()) {
                 for (ObjectCursor<MappingMetaData> mapping : indexMetaData.getMappings().values()) {
-                    if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {
-                        throw new IllegalArgumentException("can't add a _parent field that points to an already existing type");
+                    String parentType = newMapper.parentFieldMapper().type();
+                    if (parentType.equals(mapping.value.type()) &&
+                            indexService.mapperService().getParentTypes().contains(parentType) == false) {
+                        throw new IllegalArgumentException("can't add a _parent field that points to an " +
+                            "already existing type, that isn't already a parent");
                     }
                 }
             }

View File

@@ -48,7 +48,6 @@ public final class UnassignedInfo implements ToXContent, Writeable {
     public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
         Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, Property.Dynamic,
             Property.IndexScope);
-
     /**
      * Reason why the shard is in unassigned state.
      * <p>
@@ -103,7 +102,11 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         /**
          * A better replica location is identified and causes the existing replica allocation to be cancelled.
          */
-        REALLOCATED_REPLICA;
+        REALLOCATED_REPLICA,
+        /**
+         * Unassigned as a result of a failed primary while the replica was initializing.
+         */
+        PRIMARY_FAILED;
     }

     private final Reason reason;
@@ -112,6 +115,7 @@ public final class UnassignedInfo implements ToXContent, Writeable {
     private final long lastComputedLeftDelayNanos; // how long to delay shard allocation, not serialized (always positive, 0 means no delay)
     private final String message;
     private final Throwable failure;
+    private final int failedAllocations;

     /**
      * creates an UnassingedInfo object based **current** time
@@ -120,7 +124,7 @@ public final class UnassignedInfo implements ToXContent, Writeable {
      * @param message more information about cause.
      **/
     public UnassignedInfo(Reason reason, String message) {
-        this(reason, message, null, System.nanoTime(), System.currentTimeMillis());
+        this(reason, message, null, reason == Reason.ALLOCATION_FAILED ? 1 : 0, System.nanoTime(), System.currentTimeMillis());
     }

     /**
@@ -130,13 +134,16 @@ public final class UnassignedInfo implements ToXContent, Writeable {
      * @param unassignedTimeNanos the time to use as the base for any delayed re-assignment calculation
      * @param unassignedTimeMillis the time of unassignment used to display to in our reporting.
      */
-    public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, long unassignedTimeNanos, long unassignedTimeMillis) {
+    public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure, int failedAllocations, long unassignedTimeNanos, long unassignedTimeMillis) {
         this.reason = reason;
         this.unassignedTimeMillis = unassignedTimeMillis;
         this.unassignedTimeNanos = unassignedTimeNanos;
         this.lastComputedLeftDelayNanos = 0L;
         this.message = message;
         this.failure = failure;
+        this.failedAllocations = failedAllocations;
+        assert (failedAllocations > 0) == (reason == Reason.ALLOCATION_FAILED) :
+            "failedAllocations: " + failedAllocations + " for reason " + reason;
         assert !(message == null && failure != null) : "provide a message if a failure exception is provided";
     }

@@ -147,17 +154,19 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         this.lastComputedLeftDelayNanos = newComputedLeftDelayNanos;
         this.message = unassignedInfo.message;
         this.failure = unassignedInfo.failure;
+        this.failedAllocations = unassignedInfo.failedAllocations;
     }

     public UnassignedInfo(StreamInput in) throws IOException {
         this.reason = Reason.values()[(int) in.readByte()];
         this.unassignedTimeMillis = in.readLong();
         // As System.nanoTime() cannot be compared across different JVMs, reset it to now.
-        // This means that in master failover situations, elapsed delay time is forgotten.
+        // This means that in master fail-over situations, elapsed delay time is forgotten.
         this.unassignedTimeNanos = System.nanoTime();
         this.lastComputedLeftDelayNanos = 0L;
         this.message = in.readOptionalString();
         this.failure = in.readThrowable();
+        this.failedAllocations = in.readVInt();
     }

     public void writeTo(StreamOutput out) throws IOException {
@@ -166,12 +175,18 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         // Do not serialize unassignedTimeNanos as System.nanoTime() cannot be compared across different JVMs
         out.writeOptionalString(message);
         out.writeThrowable(failure);
+        out.writeVInt(failedAllocations);
     }

     public UnassignedInfo readFrom(StreamInput in) throws IOException {
         return new UnassignedInfo(in);
     }

+    /**
+     * Returns the number of previously failed allocations of this shard.
+     */
+    public int getNumFailedAllocations() { return failedAllocations; }
+
     /**
      * The reason why the shard is unassigned.
      */
@@ -325,7 +340,11 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         StringBuilder sb = new StringBuilder();
         sb.append("[reason=").append(reason).append("]");
         sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)).append("]");
+        if (failedAllocations > 0) {
+            sb.append(", failed_attempts[").append(failedAllocations).append("]");
+        }
         String details = getDetails();
         if (details != null) {
             sb.append(", details[").append(details).append("]");
         }
@@ -342,6 +361,9 @@ public final class UnassignedInfo implements ToXContent, Writeable {
         builder.startObject("unassigned_info");
         builder.field("reason", reason);
         builder.field("at", DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis));
+        if (failedAllocations > 0) {
+            builder.field("failed_attempts", failedAllocations);
+        }
         String details = getDetails();
         if (details != null) {
             builder.field("details", details);

View File

@ -222,8 +222,10 @@ public class AllocationService extends AbstractComponent {
List<FailedRerouteAllocation.FailedShard> orderedFailedShards = new ArrayList<>(failedShards); List<FailedRerouteAllocation.FailedShard> orderedFailedShards = new ArrayList<>(failedShards);
orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary())); orderedFailedShards.sort(Comparator.comparing(failedShard -> failedShard.shard.primary()));
for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) { for (FailedRerouteAllocation.FailedShard failedShard : orderedFailedShards) {
UnassignedInfo unassignedInfo = failedShard.shard.unassignedInfo();
final int failedAllocations = unassignedInfo != null ? unassignedInfo.getNumFailedAllocations() : 0;
changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure, changed |= applyFailedShard(allocation, failedShard.shard, true, new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, failedShard.message, failedShard.failure,
System.nanoTime(), System.currentTimeMillis())); failedAllocations + 1, System.nanoTime(), System.currentTimeMillis()));
} }
if (!changed) { if (!changed) {
return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData());
@ -257,16 +259,13 @@ public class AllocationService extends AbstractComponent {
.collect(Collectors.joining(", ")); .collect(Collectors.joining(", "));
} }
public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands) { public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain, boolean retryFailed) {
return reroute(clusterState, commands, false);
}
public RoutingAllocation.Result reroute(ClusterState clusterState, AllocationCommands commands, boolean explain) {
RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); RoutingNodes routingNodes = getMutableRoutingNodes(clusterState);
// we don't shuffle the unassigned shards here, to try and get as close as possible to // we don't shuffle the unassigned shards here, to try and get as close as possible to
// a consistent result of the effect the commands have on the routing // a consistent result of the effect the commands have on the routing
// this allows systems to dry run the commands, see the resulting cluster state, and act on it // this allows systems to dry run the commands, see the resulting cluster state, and act on it
RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState,
clusterInfoService.getClusterInfo(), currentNanoTime(), retryFailed);
// don't short circuit deciders, we want a full explanation // don't short circuit deciders, we want a full explanation
allocation.debugDecision(true); allocation.debugDecision(true);
// we ignore disable allocation, because commands are explicit // we ignore disable allocation, because commands are explicit
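A minimal caller sketch of the reworked signature (the service and cluster-state variables are assumed to be in scope):

    // explain=false applies the commands; retryFailed=true lets shards that hit the
    // max_retries limit be allocated once more despite their failure counter
    RoutingAllocation.Result result = allocationService.reroute(
        clusterState, new AllocationCommands(), false, true);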
@ -305,7 +304,8 @@ public class AllocationService extends AbstractComponent {
RoutingNodes routingNodes = getMutableRoutingNodes(clusterState); RoutingNodes routingNodes = getMutableRoutingNodes(clusterState);
// shuffle the unassigned nodes, just so we won't have things like poison failed shards // shuffle the unassigned nodes, just so we won't have things like poison failed shards
routingNodes.unassigned().shuffle(); routingNodes.unassigned().shuffle();
RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState, clusterInfoService.getClusterInfo(), currentNanoTime()); RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, clusterState,
clusterInfoService.getClusterInfo(), currentNanoTime(), false);
allocation.debugDecision(debug); allocation.debugDecision(debug);
if (!reroute(allocation)) { if (!reroute(allocation)) {
return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData());
@ -437,7 +437,7 @@ public class AllocationService extends AbstractComponent {
// now, go over all the shards routing on the node, and fail them // now, go over all the shards routing on the node, and fail them
for (ShardRouting shardRouting : node.copyShards()) { for (ShardRouting shardRouting : node.copyShards()) {
UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "node_left[" + node.nodeId() + "]", null, UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "node_left[" + node.nodeId() + "]", null,
allocation.getCurrentNanoTime(), System.currentTimeMillis()); 0, allocation.getCurrentNanoTime(), System.currentTimeMillis());
applyFailedShard(allocation, shardRouting, false, unassignedInfo); applyFailedShard(allocation, shardRouting, false, unassignedInfo);
} }
// its a dead node, remove it, note, its important to remove it *after* we apply failed shard // its a dead node, remove it, note, its important to remove it *after* we apply failed shard
@ -457,8 +457,8 @@ public class AllocationService extends AbstractComponent {
boolean changed = false; boolean changed = false;
for (ShardRouting routing : replicas) { for (ShardRouting routing : replicas) {
changed |= applyFailedShard(allocation, routing, false, changed |= applyFailedShard(allocation, routing, false,
new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "primary failed while replica initializing", new UnassignedInfo(UnassignedInfo.Reason.PRIMARY_FAILED, "primary failed while replica initializing",
null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis()));
} }
return changed; return changed;
} }

View File

@ -58,7 +58,7 @@ public class FailedRerouteAllocation extends RoutingAllocation {
private final List<FailedShard> failedShards; private final List<FailedShard> failedShards;
public FailedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List<FailedShard> failedShards, ClusterInfo clusterInfo) { public FailedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List<FailedShard> failedShards, ClusterInfo clusterInfo) {
super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false);
this.failedShards = failedShards; this.failedShards = failedShards;
} }

View File

@ -134,6 +134,8 @@ public class RoutingAllocation {
private boolean ignoreDisable = false; private boolean ignoreDisable = false;
private final boolean retryFailed;
private boolean debugDecision = false; private boolean debugDecision = false;
private boolean hasPendingAsyncFetch = false; private boolean hasPendingAsyncFetch = false;
@ -148,7 +150,7 @@ public class RoutingAllocation {
* @param clusterState cluster state before rerouting * @param clusterState cluster state before rerouting
* @param currentNanoTime the nano time to use for all delay allocation calculation (typically {@link System#nanoTime()}) * @param currentNanoTime the nano time to use for all delay allocation calculation (typically {@link System#nanoTime()})
*/ */
public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime) { public RoutingAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, ClusterInfo clusterInfo, long currentNanoTime, boolean retryFailed) {
this.deciders = deciders; this.deciders = deciders;
this.routingNodes = routingNodes; this.routingNodes = routingNodes;
this.metaData = clusterState.metaData(); this.metaData = clusterState.metaData();
@ -156,6 +158,7 @@ public class RoutingAllocation {
this.customs = clusterState.customs(); this.customs = clusterState.customs();
this.clusterInfo = clusterInfo; this.clusterInfo = clusterInfo;
this.currentNanoTime = currentNanoTime; this.currentNanoTime = currentNanoTime;
this.retryFailed = retryFailed;
} }
/** returns the nano time captured at the beginning of the allocation. used to make sure all time based decisions are aligned */ /** returns the nano time captured at the beginning of the allocation. used to make sure all time based decisions are aligned */
@ -297,4 +300,8 @@ public class RoutingAllocation {
public void setHasPendingAsyncFetch() { public void setHasPendingAsyncFetch() {
this.hasPendingAsyncFetch = true; this.hasPendingAsyncFetch = true;
} }
public boolean isRetryFailed() {
return retryFailed;
}
} }
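A short construction sketch showing the new flag (the surrounding objects are assumed to exist):

    RoutingAllocation allocation = new RoutingAllocation(deciders, routingNodes, clusterState,
        clusterInfo, System.nanoTime(), true);
    assert allocation.isRetryFailed(); // deciders may now permit one manual retry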

View File

@ -36,7 +36,7 @@ public class StartedRerouteAllocation extends RoutingAllocation {
private final List<? extends ShardRouting> startedShards; private final List<? extends ShardRouting> startedShards;
public StartedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List<? extends ShardRouting> startedShards, ClusterInfo clusterInfo) { public StartedRerouteAllocation(AllocationDeciders deciders, RoutingNodes routingNodes, ClusterState clusterState, List<? extends ShardRouting> startedShards, ClusterInfo clusterInfo) {
super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime()); super(deciders, routingNodes, clusterState, clusterInfo, System.nanoTime(), false);
this.startedShards = startedShards; this.startedShards = startedShards;
} }

View File

@ -125,7 +125,7 @@ public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocation
// we need to move the unassigned info back to treat it as if it was index creation // we need to move the unassigned info back to treat it as if it was index creation
unassignedInfoToUpdate = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, unassignedInfoToUpdate = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED,
"force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + shardRouting.unassignedInfo().getMessage(), "force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + shardRouting.unassignedInfo().getMessage(),
shardRouting.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis()); shardRouting.unassignedInfo().getFailure(), 0, System.nanoTime(), System.currentTimeMillis());
} }
initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate); initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate);

View File

@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
/**
* An allocation decider that prevents shards from being allocated on any node if the shards allocation has been retried N times without
* success. This means that if a shard has been INITIALIZING N times in a row without being moved to STARTED, the shard will be ignored until
* the setting for <tt>index.allocation.max_retries</tt> is raised. The default value is <tt>5</tt>.
* Note: This allocation decider also allows allocation of repeatedly failing shards when the <tt>/_cluster/reroute?retry_failed=true</tt>
* API is manually invoked. This allows single retries without raising the limits.
*
* @see RoutingAllocation#isRetryFailed()
*/
public class MaxRetryAllocationDecider extends AllocationDecider {
public static final Setting<Integer> SETTING_ALLOCATION_MAX_RETRY = Setting.intSetting("index.allocation.max_retries", 5, 0,
Setting.Property.Dynamic, Setting.Property.IndexScope);
public static final String NAME = "max_retry";
/**
* Initializes a new {@link MaxRetryAllocationDecider}
*
* @param settings {@link Settings} used by this {@link AllocationDecider}
*/
@Inject
public MaxRetryAllocationDecider(Settings settings) {
super(settings);
}
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) {
UnassignedInfo unassignedInfo = shardRouting.unassignedInfo();
if (unassignedInfo != null && unassignedInfo.getNumFailedAllocations() > 0) {
final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index());
final int maxRetry = SETTING_ALLOCATION_MAX_RETRY.get(indexMetaData.getSettings());
if (allocation.isRetryFailed()) { // manual allocation - retry
// if we are called via the _reroute API we ignore the failure counter and try to allocate
// this improves usability since people don't need to raise the limits to issue retries; a simple _reroute call is
// enough to manually retry.
return allocation.decision(Decision.YES, NAME, "shard has already failed allocating ["
+ unassignedInfo.getNumFailedAllocations() + "] times vs. [" + maxRetry + "] retries allowed "
+ unassignedInfo.toString() + " - retrying once on manual allocation");
} else if (unassignedInfo.getNumFailedAllocations() >= maxRetry) {
return allocation.decision(Decision.NO, NAME, "shard has already failed allocating ["
+ unassignedInfo.getNumFailedAllocations() + "] times vs. [" + maxRetry + "] retries allowed "
+ unassignedInfo.toString() + " - manually call [/_cluster/reroute?retry_failed=true] to retry");
}
}
return allocation.decision(Decision.YES, NAME, "shard has no previous failures");
}
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return canAllocate(shardRouting, allocation);
}
}
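A hedged sketch of the decider in action, assuming a shard whose unassigned info already records five failed attempts:

    MaxRetryAllocationDecider decider = new MaxRetryAllocationDecider(Settings.EMPTY);
    // With the default index.allocation.max_retries of 5, this returns Decision.NO...
    Decision decision = decider.canAllocate(shardRouting, allocation);
    // ...unless the allocation was started via /_cluster/reroute?retry_failed=true,
    // in which case allocation.isRetryFailed() is true and the decider answers YES once.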

View File

@ -23,7 +23,6 @@ import org.apache.log4j.Java9Hack;
import org.apache.log4j.PropertyConfigurator; import org.apache.log4j.PropertyConfigurator;
import org.apache.lucene.util.Constants; import org.apache.lucene.util.Constants;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
@ -99,7 +98,6 @@ public class LogConfigurator {
/** /**
* Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders. * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders.
*
* @param settings custom settings that should be applied * @param settings custom settings that should be applied
* @param resolveConfig controls whether the logging conf file should be read too or not. * @param resolveConfig controls whether the logging conf file should be read too or not.
*/ */
@ -115,7 +113,7 @@ public class LogConfigurator {
if (resolveConfig) { if (resolveConfig) {
resolveConfig(environment, settingsBuilder); resolveConfig(environment, settingsBuilder);
} }
settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties());
// add custom settings after config was added so that they are not overwritten by config // add custom settings after config was added so that they are not overwritten by config
settingsBuilder.put(settings); settingsBuilder.put(settings);
settingsBuilder.replacePropertyPlaceholders(); settingsBuilder.replacePropertyPlaceholders();

View File

@ -87,6 +87,7 @@ import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.repositories.uri.URLRepository;
import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchService;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.Transport;
@ -374,7 +375,6 @@ public final class ClusterSettings extends AbstractScopedSettings {
BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX, BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX,
ClusterName.CLUSTER_NAME_SETTING, ClusterName.CLUSTER_NAME_SETTING,
Client.CLIENT_TYPE_SETTING_S, Client.CLIENT_TYPE_SETTING_S,
InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING,
ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING,
EsExecutors.PROCESSORS_SETTING, EsExecutors.PROCESSORS_SETTING,
ThreadContext.DEFAULT_HEADERS_SETTING, ThreadContext.DEFAULT_HEADERS_SETTING,
@ -420,6 +420,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
ResourceWatcherService.ENABLED, ResourceWatcherService.ENABLED,
ResourceWatcherService.RELOAD_INTERVAL_HIGH, ResourceWatcherService.RELOAD_INTERVAL_HIGH,
ResourceWatcherService.RELOAD_INTERVAL_MEDIUM, ResourceWatcherService.RELOAD_INTERVAL_MEDIUM,
ResourceWatcherService.RELOAD_INTERVAL_LOW ResourceWatcherService.RELOAD_INTERVAL_LOW,
SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING
))); )));
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common.settings;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.gateway.PrimaryShardAllocator; import org.elasticsearch.gateway.PrimaryShardAllocator;
@ -35,12 +36,11 @@ import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.percolator.PercolatorQueryCache; import org.elasticsearch.index.percolator.PercolatorFieldMapper;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.FsDirectoryService;
import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.IndexWarmer;
import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.IndicesRequestCache;
import java.util.Arrays; import java.util.Arrays;
@ -59,6 +59,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
public static final Predicate<String> INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX); public static final Predicate<String> INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX);
public static final Set<Setting<?>> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( public static final Set<Setting<?>> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY,
IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING, IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING,
IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING,
IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
@ -126,7 +127,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
FieldMapper.IGNORE_MALFORMED_SETTING, FieldMapper.IGNORE_MALFORMED_SETTING,
FieldMapper.COERCE_SETTING, FieldMapper.COERCE_SETTING,
Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING,
PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING,
MapperService.INDEX_MAPPER_DYNAMIC_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_SETTING,
MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING,
MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING, MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING,

View File

@ -537,6 +537,10 @@ public class Setting<T> extends ToXContentToBytes {
return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties); return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, properties);
} }
public static Setting<Boolean> boolSetting(String key, Function<Settings, String> defaultValueFn, Property... properties) {
return new Setting<>(key, defaultValueFn, Booleans::parseBooleanExact, properties);
}
public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, Property... properties) { public static Setting<ByteSizeValue> byteSizeSetting(String key, String percentage, Property... properties) {
return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties);
} }
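A usage sketch for the new overload, with a hypothetical setting whose default is derived from another setting at resolution time:

    Setting<Boolean> exampleEnabled = Setting.boolSetting("index.example.enabled",
        settings -> settings.get("index.example.fallback", "false"),
        Setting.Property.IndexScope);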

View File

@ -58,9 +58,11 @@ import java.util.Set;
import java.util.SortedMap; import java.util.SortedMap;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue;
import static org.elasticsearch.common.unit.SizeValue.parseSizeValue; import static org.elasticsearch.common.unit.SizeValue.parseSizeValue;
@ -942,66 +944,32 @@ public final class Settings implements ToXContent {
return this; return this;
} }
/** public Builder putProperties(Map<String, String> esSettings, Predicate<String> keyPredicate, Function<String, String> keyFunction) {
* Puts all the properties with keys starting with the provided <tt>prefix</tt>. for (final Map.Entry<String, String> esSetting : esSettings.entrySet()) {
* final String key = esSetting.getKey();
* @param prefix The prefix to filter property key by if (keyPredicate.test(key)) {
* @param properties The properties to put map.put(keyFunction.apply(key), esSetting.getValue());
* @return The builder
*/
public Builder putProperties(String prefix, Dictionary<Object, Object> properties) {
for (Object property : Collections.list(properties.keys())) {
String key = Objects.toString(property);
String value = Objects.toString(properties.get(property));
if (key.startsWith(prefix)) {
map.put(key.substring(prefix.length()), value);
} }
} }
return this; return this;
} }
/** /**
* Puts all the properties with keys starting with the provided <tt>prefix</tt>. * Runs across all the settings set on this builder and
* * replaces <tt>${...}</tt> elements in each setting with
* @param prefix The prefix to filter property key by * another setting already set on this builder.
* @param properties The properties to put
* @return The builder
*/
public Builder putProperties(String prefix, Dictionary<Object, Object> properties, String ignorePrefix) {
for (Object property : Collections.list(properties.keys())) {
String key = Objects.toString(property);
String value = Objects.toString(properties.get(property));
if (key.startsWith(prefix)) {
if (!key.startsWith(ignorePrefix)) {
map.put(key.substring(prefix.length()), value);
}
}
}
return this;
}
/**
* Runs across all the settings set on this builder and replaces <tt>${...}</tt> elements in
* each setting value according to the following logic:
* <p>
* First, tries to resolve it against a System property ({@link System#getProperty(String)}), next,
* tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries
* and replace it with another setting already set on this builder.
*/ */
public Builder replacePropertyPlaceholders() { public Builder replacePropertyPlaceholders() {
return replacePropertyPlaceholders(System::getenv);
}
// visible for testing
Builder replacePropertyPlaceholders(Function<String, String> getenv) {
PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false); PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false);
PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() { PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() {
@Override @Override
public String resolvePlaceholder(String placeholderName) { public String resolvePlaceholder(String placeholderName) {
if (placeholderName.startsWith("env.")) { final String value = getenv.apply(placeholderName);
// explicit env var prefix
return System.getenv(placeholderName.substring("env.".length()));
}
String value = System.getProperty(placeholderName);
if (value != null) {
return value;
}
value = System.getenv(placeholderName);
if (value != null) { if (value != null) {
return value; return value;
} }
@ -1010,8 +978,7 @@ public final class Settings implements ToXContent {
@Override @Override
public boolean shouldIgnoreMissing(String placeholderName) { public boolean shouldIgnoreMissing(String placeholderName) {
// if its an explicit env var, we are ok with not having a value for it and treat it as optional if (placeholderName.startsWith("prompt.")) {
if (placeholderName.startsWith("env.") || placeholderName.startsWith("prompt.")) {
return true; return true;
} }
return false; return false;
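A minimal sketch of the reworked builder API; the environment-variable names are hypothetical:

    Settings settings = Settings.builder()
        .putProperties(System.getenv(), key -> key.startsWith("ES_"), key -> key.substring("ES_".length()))
        .put("path.data", "${DATA_DIR}")
        .replacePropertyPlaceholders() // ${DATA_DIR} is now resolved against environment variables only
        .build();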

View File

@ -65,7 +65,12 @@ public class SettingsModule extends AbstractModule {
protected void configure() { protected void configure() {
final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values())); final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
Settings indexSettings = settings.filter((s) -> s.startsWith("index.") && clusterSettings.get(s) == null); Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") &&
// special case - we want the "Did you mean indices.query.bool.max_clause_count?" suggestion for this setting,
// which means we need to bypass this check for it
// TODO remove in 6.0!!
"index.query.bool.max_clause_count".equals(s) == false)
&& clusterSettings.get(s) == null);
if (indexSettings.isEmpty() == false) { if (indexSettings.isEmpty() == false) {
try { try {
String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim(); String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim();

View File

@ -1,629 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.hash.MurmurHash3;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.SizeValue;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
/**
* A bloom filter. Inspired by the Guava bloom filter implementation, though with some optimizations.
*/
public class BloomFilter {
/**
* A factory that can use different fpp based on size.
*/
public static class Factory {
public static final Factory DEFAULT = buildDefault();
private static Factory buildDefault() {
// Some numbers:
// 10k =0.001: 140.4kb , 10 Hashes
// 10k =0.01 : 93.6kb , 6 Hashes
// 100k=0.01 : 936.0kb , 6 Hashes
// 100k=0.03 : 712.7kb , 5 Hashes
// 500k=0.01 : 4.5mb , 6 Hashes
// 500k=0.03 : 3.4mb , 5 Hashes
// 500k=0.05 : 2.9mb , 4 Hashes
// 1m=0.01 : 9.1mb , 6 Hashes
// 1m=0.03 : 6.9mb , 5 Hashes
// 1m=0.05 : 5.9mb , 4 Hashes
// 5m=0.01 : 45.7mb , 6 Hashes
// 5m=0.03 : 34.8mb , 5 Hashes
// 5m=0.05 : 29.7mb , 4 Hashes
// 50m=0.01 : 457.0mb , 6 Hashes
// 50m=0.03 : 297.3mb , 4 Hashes
// 50m=0.10 : 228.5mb , 3 Hashes
return buildFromString("10k=0.01,1m=0.03");
}
/**
* Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If
* it is null, returns {@link #buildDefault()}.
*/
public static Factory buildFromString(@Nullable String config) {
if (config == null) {
return buildDefault();
}
String[] sEntries = config.split(",");
if (sEntries.length == 0) {
if (config.length() > 0) {
return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))});
}
return buildDefault();
}
Entry[] entries = new Entry[sEntries.length];
for (int i = 0; i < sEntries.length; i++) {
int index = sEntries[i].indexOf('=');
entries[i] = new Entry(
(int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(),
Double.parseDouble(sEntries[i].substring(index + 1).trim())
);
}
return new Factory(entries);
}
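A usage sketch for the configuration string (the counts and probabilities are illustrative):

    BloomFilter.Factory factory = BloomFilter.Factory.buildFromString("100k=0.01,1m=0.03");
    BloomFilter filter = factory.createFilter(250_000); // matched by the 100k=0.01 entry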
private final Entry[] entries;
public Factory(Entry[] entries) {
this.entries = entries;
// the order is from the upper most expected insertions to the lowest
Arrays.sort(this.entries, new Comparator<Entry>() {
@Override
public int compare(Entry o1, Entry o2) {
return o2.expectedInsertions - o1.expectedInsertions;
}
});
}
public BloomFilter createFilter(int expectedInsertions) {
for (Entry entry : entries) {
if (expectedInsertions > entry.expectedInsertions) {
return BloomFilter.create(expectedInsertions, entry.fpp);
}
}
return BloomFilter.create(expectedInsertions, 0.03);
}
public static class Entry {
public final int expectedInsertions;
public final double fpp;
Entry(int expectedInsertions, double fpp) {
this.expectedInsertions = expectedInsertions;
this.fpp = fpp;
}
}
}
/**
* Creates a bloom filter based on the expected number
* of insertions and expected false positive probability.
*
* @param expectedInsertions the number of expected insertions into the constructed filter
* @param fpp the desired false positive probability (must be positive and less than 1.0)
*/
public static BloomFilter create(int expectedInsertions, double fpp) {
return create(expectedInsertions, fpp, -1);
}
/**
* Creates a bloom filter based on the expected number of insertions, expected false positive probability,
* and number of hash functions.
*
* @param expectedInsertions the number of expected insertions into the constructed filter
* @param fpp the desired false positive probability (must be positive and less than 1.0)
* @param numHashFunctions the number of hash functions to use (must be less than or equal to 255)
*/
public static BloomFilter create(int expectedInsertions, double fpp, int numHashFunctions) {
if (expectedInsertions == 0) {
expectedInsertions = 1;
}
/*
* TODO(user): Put a warning in the javadoc about tiny fpp values,
* since the resulting size is proportional to -log(p), but there is not
* much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680
* which is less than 10kb. Who cares!
*/
long numBits = optimalNumOfBits(expectedInsertions, fpp);
// calculate the optimal number of hash functions
if (numHashFunctions == -1) {
numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits);
}
try {
return new BloomFilter(new BitArray(numBits), numHashFunctions, Hashing.DEFAULT);
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e);
}
}
public static void skipBloom(IndexInput in) throws IOException {
int version = in.readInt(); // we do nothing with this now..., defaults to 0
final int numLongs = in.readInt();
in.seek(in.getFilePointer() + (numLongs * 8) + 4 + 4); // filter + numberOfHashFunctions + hashType
}
public static BloomFilter deserialize(DataInput in) throws IOException {
int version = in.readInt(); // we do nothing with this now..., defaults to 0
int numLongs = in.readInt();
long[] data = new long[numLongs];
for (int i = 0; i < numLongs; i++) {
data[i] = in.readLong();
}
int numberOfHashFunctions = in.readInt();
int hashType = in.readInt();
return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
}
public static void serialize(BloomFilter filter, DataOutput out) throws IOException {
out.writeInt(0); // version
BitArray bits = filter.bits;
out.writeInt(bits.data.length);
for (long l : bits.data) {
out.writeLong(l);
}
out.writeInt(filter.numHashFunctions);
out.writeInt(filter.hashing.type()); // hashType
}
public static BloomFilter readFrom(StreamInput in) throws IOException {
int version = in.readVInt(); // we do nothing with this now..., defaults to 0
int numLongs = in.readVInt();
long[] data = new long[numLongs];
for (int i = 0; i < numLongs; i++) {
data[i] = in.readLong();
}
int numberOfHashFunctions = in.readVInt();
int hashType = in.readVInt(); // again, nothing to do now...
return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
}
public static void writeTo(BloomFilter filter, StreamOutput out) throws IOException {
out.writeVInt(0); // version
BitArray bits = filter.bits;
out.writeVInt(bits.data.length);
for (long l : bits.data) {
out.writeLong(l);
}
out.writeVInt(filter.numHashFunctions);
out.writeVInt(filter.hashing.type()); // hashType
}
/**
* The bit set of the BloomFilter (not necessarily power of 2!)
*/
final BitArray bits;
/**
* Number of hashes per element
*/
final int numHashFunctions;
final Hashing hashing;
BloomFilter(BitArray bits, int numHashFunctions, Hashing hashing) {
this.bits = bits;
this.numHashFunctions = numHashFunctions;
this.hashing = hashing;
/*
* This only exists to forbid BFs that cannot use the compact persistent representation.
* If it ever throws, for a user who was not intending to use that representation, we should
* reconsider
*/
if (numHashFunctions > 255) {
throw new IllegalArgumentException("Currently we don't allow BloomFilters that would use more than 255 hash functions");
}
}
public boolean put(BytesRef value) {
return hashing.put(value, numHashFunctions, bits);
}
public boolean mightContain(BytesRef value) {
return hashing.mightContain(value, numHashFunctions, bits);
}
public int getNumHashFunctions() {
return this.numHashFunctions;
}
public long getSizeInBytes() {
return bits.ramBytesUsed();
}
@Override
public int hashCode() {
return bits.hashCode() + numHashFunctions;
}
/*
* Cheat sheet:
*
* m: total bits
* n: expected insertions
* b: m/n, bits per insertion
* p: expected false positive probability
*
* 1) Optimal k = b * ln2
* 2) p = (1 - e ^ (-kn/m))^k
* 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b
* 4) For optimal k: m = -nlnp / ((ln2) ^ 2)
*/
/**
* Computes the optimal k (number of hashes per element inserted in Bloom filter), given the
* expected insertions and total number of bits in the Bloom filter.
* <p>
* See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula.
*
* @param n expected insertions (must be positive)
* @param m total number of bits in Bloom filter (must be positive)
*/
static int optimalNumOfHashFunctions(long n, long m) {
return Math.max(1, (int) Math.round(m / n * Math.log(2)));
}
/**
* Computes m (total bits of Bloom filter) which is expected to achieve, for the specified
* expected insertions, the required false positive probability.
* <p>
* See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula.
*
* @param n expected insertions (must be positive)
* @param p false positive rate (must be 0 &lt; p &lt; 1)
*/
static long optimalNumOfBits(long n, double p) {
if (p == 0) {
p = Double.MIN_VALUE;
}
return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));
}
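A worked check of these formulas against the table in buildDefault, assuming the sizes there count bits with binary prefixes:

    n = 1,000,000 and p = 0.03:
    m = -n * ln(p) / (ln 2)^2 ≈ 1,000,000 * 3.5066 / 0.4805 ≈ 7.30e6 bits ≈ 6.96 "mb"
    k = round((m / n) * ln 2) = round(7.30 * 0.693) = round(5.06) = 5 hashes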
// Note: We use this instead of java.util.BitSet because we need access to the long[] data field
static final class BitArray {
final long[] data;
final long bitSize;
long bitCount;
BitArray(long bits) {
this(new long[size(bits)]);
}
private static int size(long bits) {
long quotient = bits / 64;
long remainder = bits - quotient * 64;
return Math.toIntExact(remainder == 0 ? quotient : 1 + quotient);
}
// Used by serialization
BitArray(long[] data) {
this.data = data;
long bitCount = 0;
for (long value : data) {
bitCount += Long.bitCount(value);
}
this.bitCount = bitCount;
this.bitSize = data.length * Long.SIZE;
}
/** Returns true if the bit changed value. */
boolean set(long index) {
if (!get(index)) {
data[(int) (index >>> 6)] |= (1L << index);
bitCount++;
return true;
}
return false;
}
boolean get(long index) {
return (data[(int) (index >>> 6)] & (1L << index)) != 0;
}
/** Number of bits */
long bitSize() {
return bitSize;
}
/** Number of set bits (1s) */
long bitCount() {
return bitCount;
}
BitArray copy() {
return new BitArray(data.clone());
}
/** Combines the two BitArrays using bitwise OR. */
void putAll(BitArray array) {
bitCount = 0;
for (int i = 0; i < data.length; i++) {
data[i] |= array.data[i];
bitCount += Long.bitCount(data[i]);
}
}
@Override public boolean equals(Object o) {
if (o instanceof BitArray) {
BitArray bitArray = (BitArray) o;
return Arrays.equals(data, bitArray.data);
}
return false;
}
@Override public int hashCode() {
return Arrays.hashCode(data);
}
public long ramBytesUsed() {
return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16;
}
}
static enum Hashing {
V0() {
@Override
protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
long bitSize = bits.bitSize();
long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
int hash1 = (int) hash64;
int hash2 = (int) (hash64 >>> 32);
boolean bitsChanged = false;
for (int i = 1; i <= numHashFunctions; i++) {
int nextHash = hash1 + i * hash2;
if (nextHash < 0) {
nextHash = ~nextHash;
}
bitsChanged |= bits.set(nextHash % bitSize);
}
return bitsChanged;
}
@Override
protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
long bitSize = bits.bitSize();
long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
int hash1 = (int) hash64;
int hash2 = (int) (hash64 >>> 32);
for (int i = 1; i <= numHashFunctions; i++) {
int nextHash = hash1 + i * hash2;
if (nextHash < 0) {
nextHash = ~nextHash;
}
if (!bits.get(nextHash % bitSize)) {
return false;
}
}
return true;
}
@Override
protected int type() {
return 0;
}
},
V1() {
@Override
protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
long bitSize = bits.bitSize();
MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
boolean bitsChanged = false;
long combinedHash = hash128.h1;
for (int i = 0; i < numHashFunctions; i++) {
// Make the combined hash positive and indexable
bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize);
combinedHash += hash128.h2;
}
return bitsChanged;
}
@Override
protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
long bitSize = bits.bitSize();
MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
long combinedHash = hash128.h1;
for (int i = 0; i < numHashFunctions; i++) {
// Make the combined hash positive and indexable
if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) {
return false;
}
combinedHash += hash128.h2;
}
return true;
}
@Override
protected int type() {
return 1;
}
}
;
protected abstract boolean put(BytesRef value, int numHashFunctions, BitArray bits);
protected abstract boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits);
protected abstract int type();
public static final Hashing DEFAULT = Hashing.V1;
public static Hashing fromType(int type) {
if (type == 0) {
return Hashing.V0;
} else if (type == 1) {
return Hashing.V1;
} else {
throw new IllegalArgumentException("no hashing type matching " + type);
}
}
}
// START : MURMUR 3_128 USED FOR Hashing.V0
// NOTE: don't replace this code with the o.e.common.hashing.MurmurHash3 method which returns a different hash
protected static long getblock(byte[] key, int offset, int index) {
int i_8 = index << 3;
int blockOffset = offset + i_8;
return ((long) key[blockOffset + 0] & 0xff) + (((long) key[blockOffset + 1] & 0xff) << 8) +
(((long) key[blockOffset + 2] & 0xff) << 16) + (((long) key[blockOffset + 3] & 0xff) << 24) +
(((long) key[blockOffset + 4] & 0xff) << 32) + (((long) key[blockOffset + 5] & 0xff) << 40) +
(((long) key[blockOffset + 6] & 0xff) << 48) + (((long) key[blockOffset + 7] & 0xff) << 56);
}
protected static long rotl64(long v, int n) {
return ((v << n) | (v >>> (64 - n)));
}
protected static long fmix(long k) {
k ^= k >>> 33;
k *= 0xff51afd7ed558ccdL;
k ^= k >>> 33;
k *= 0xc4ceb9fe1a85ec53L;
k ^= k >>> 33;
return k;
}
@SuppressWarnings("fallthrough") // Uses fallthrough to implement a well-known hashing algorithm
public static long hash3_x64_128(byte[] key, int offset, int length, long seed) {
final int nblocks = length >> 4; // Process as 128-bit blocks.
long h1 = seed;
long h2 = seed;
long c1 = 0x87c37b91114253d5L;
long c2 = 0x4cf5ad432745937fL;
//----------
// body
for (int i = 0; i < nblocks; i++) {
long k1 = getblock(key, offset, i * 2 + 0);
long k2 = getblock(key, offset, i * 2 + 1);
k1 *= c1;
k1 = rotl64(k1, 31);
k1 *= c2;
h1 ^= k1;
h1 = rotl64(h1, 27);
h1 += h2;
h1 = h1 * 5 + 0x52dce729;
k2 *= c2;
k2 = rotl64(k2, 33);
k2 *= c1;
h2 ^= k2;
h2 = rotl64(h2, 31);
h2 += h1;
h2 = h2 * 5 + 0x38495ab5;
}
//----------
// tail
// Advance offset to the unprocessed tail of the data.
offset += nblocks * 16;
long k1 = 0;
long k2 = 0;
switch (length & 15) {
case 15:
k2 ^= ((long) key[offset + 14]) << 48;
case 14:
k2 ^= ((long) key[offset + 13]) << 40;
case 13:
k2 ^= ((long) key[offset + 12]) << 32;
case 12:
k2 ^= ((long) key[offset + 11]) << 24;
case 11:
k2 ^= ((long) key[offset + 10]) << 16;
case 10:
k2 ^= ((long) key[offset + 9]) << 8;
case 9:
k2 ^= ((long) key[offset + 8]) << 0;
k2 *= c2;
k2 = rotl64(k2, 33);
k2 *= c1;
h2 ^= k2;
case 8:
k1 ^= ((long) key[offset + 7]) << 56;
case 7:
k1 ^= ((long) key[offset + 6]) << 48;
case 6:
k1 ^= ((long) key[offset + 5]) << 40;
case 5:
k1 ^= ((long) key[offset + 4]) << 32;
case 4:
k1 ^= ((long) key[offset + 3]) << 24;
case 3:
k1 ^= ((long) key[offset + 2]) << 16;
case 2:
k1 ^= ((long) key[offset + 1]) << 8;
case 1:
k1 ^= (key[offset]);
k1 *= c1;
k1 = rotl64(k1, 31);
k1 *= c2;
h1 ^= k1;
}
//----------
// finalization
h1 ^= length;
h2 ^= length;
h1 += h2;
h2 += h1;
h1 = fmix(h1);
h2 = fmix(h2);
h1 += h2;
h2 += h1;
//return (new long[]{h1, h2});
// SAME AS GUAVA, they take the first long out of the 128bit
return h1;
}
// END: MURMUR 3_128
}
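A minimal usage sketch of this (now removed) helper:

    BloomFilter bf = BloomFilter.create(500_000, 0.01);
    bf.put(new BytesRef("user-42"));
    boolean hit = bf.mightContain(new BytesRef("user-42"));  // always true
    boolean miss = bf.mightContain(new BytesRef("user-43")); // false with probability ~0.99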

View File

@ -108,7 +108,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
currentNode, nodeWithHighestMatch); currentNode, nodeWithHighestMatch);
it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA, it.moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.REALLOCATED_REPLICA,
"existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]", "existing allocation of replica to [" + currentNode + "] cancelled, sync id match found on node [" + nodeWithHighestMatch + "]",
null, allocation.getCurrentNanoTime(), System.currentTimeMillis())); null, 0, allocation.getCurrentNanoTime(), System.currentTimeMillis()));
changed = true; changed = true;
} }
} }

View File

@ -50,7 +50,6 @@ import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexEventListener;
@ -151,11 +150,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
this.indexStore = indexStore; this.indexStore = indexStore;
indexFieldData.setListener(new FieldDataCacheListener(this)); indexFieldData.setListener(new FieldDataCacheListener(this));
this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this)); this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this));
PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(indexSettings, IndexService.this::newQueryShardContext);
this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool, this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool,
bitsetFilterCache.createListener(threadPool), bitsetFilterCache.createListener(threadPool));
percolatorQueryCache.createListener(threadPool)); this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache);
this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache, percolatorQueryCache);
this.engineFactory = engineFactory; this.engineFactory = engineFactory;
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
this.searcherWrapper = wrapperFactory.newWrapper(this); this.searcherWrapper = wrapperFactory.newWrapper(this);
@ -239,8 +236,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
} }
} }
} finally { } finally {
IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask, IOUtils.close(bitsetFilterCache, indexCache, indexFieldData, analysisService, refreshTask, fsyncTask);
cache().getPercolatorQueryCache());
} }
} }
} }
@ -443,7 +439,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
return new QueryShardContext( return new QueryShardContext(
indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(),
nodeServicesProvider.getClient(), indexCache.getPercolatorQueryCache(), indexReader, nodeServicesProvider.getClient(), indexReader,
nodeServicesProvider.getClusterService().state() nodeServicesProvider.getClusterService().state()
); );
} }

View File

@ -321,7 +321,7 @@ public final class AnalysisRegistry implements Closeable {
if (currentSettings.get("tokenizer") != null) { if (currentSettings.get("tokenizer") != null) {
factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);
} else { } else {
throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
} }
} else if (typeName.equals("custom")) { } else if (typeName.equals("custom")) {
factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings); factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);
@ -335,7 +335,7 @@ public final class AnalysisRegistry implements Closeable {
factories.put(name, factory); factories.put(name, factory);
} else { } else {
if (typeName == null) { if (typeName == null) {
throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it"); throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
} }
AnalysisModule.AnalysisProvider<T> type = providerMap.get(typeName); AnalysisModule.AnalysisProvider<T> type = providerMap.get(typeName);
if (type == null) { if (type == null) {
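For context, an analyzer definition that satisfies the reworded requirement might be configured like this (a sketch with hypothetical names):

    Settings analysisSettings = Settings.builder()
        // supplying a tokenizer implies a custom analyzer, so no explicit type is needed
        .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
        .build();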

View File

@ -33,13 +33,11 @@ import org.apache.lucene.analysis.util.CharArraySet;
public final class FingerprintAnalyzer extends Analyzer { public final class FingerprintAnalyzer extends Analyzer {
private final char separator; private final char separator;
private final int maxOutputSize; private final int maxOutputSize;
private final boolean preserveOriginal;
private final CharArraySet stopWords; private final CharArraySet stopWords;
public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize, boolean preserveOriginal) { public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) {
this.separator = separator; this.separator = separator;
this.maxOutputSize = maxOutputSize; this.maxOutputSize = maxOutputSize;
this.preserveOriginal = preserveOriginal;
this.stopWords = stopWords; this.stopWords = stopWords;
} }
@ -48,7 +46,7 @@ public final class FingerprintAnalyzer extends Analyzer {
final Tokenizer tokenizer = new StandardTokenizer(); final Tokenizer tokenizer = new StandardTokenizer();
TokenStream stream = tokenizer; TokenStream stream = tokenizer;
stream = new LowerCaseFilter(stream); stream = new LowerCaseFilter(stream);
stream = new ASCIIFoldingFilter(stream, preserveOriginal); stream = new ASCIIFoldingFilter(stream, false);
stream = new StopFilter(stream, stopWords); stream = new StopFilter(stream, stopWords);
stream = new FingerprintFilter(stream, maxOutputSize, separator); stream = new FingerprintFilter(stream, maxOutputSize, separator);
return new TokenStreamComponents(tokenizer, stream); return new TokenStreamComponents(tokenizer, stream);
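A hedged sketch of the simplified analyzer (ASCII folding no longer preserves originals):

    Analyzer analyzer = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255);
    // "Café CAFE zebra" -> lowercased, folded, de-duplicated, sorted: "cafe zebra"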

View File

@ -34,10 +34,8 @@ import org.elasticsearch.index.IndexSettings;
public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer> { public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer> {
public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE; public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE;
public static ParseField PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.PRESERVE_ORIGINAL;
public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE; public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE;
public static boolean DEFAULT_PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.DEFAULT_PRESERVE_ORIGINAL;
public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET; public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET;
private final FingerprintAnalyzer analyzer; private final FingerprintAnalyzer analyzer;
@ -47,10 +45,9 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<A
char separator = FingerprintTokenFilterFactory.parseSeparator(settings); char separator = FingerprintTokenFilterFactory.parseSeparator(settings);
int maxOutputSize = settings.getAsInt(MAX_OUTPUT_SIZE.getPreferredName(),DEFAULT_MAX_OUTPUT_SIZE); int maxOutputSize = settings.getAsInt(MAX_OUTPUT_SIZE.getPreferredName(),DEFAULT_MAX_OUTPUT_SIZE);
boolean preserveOriginal = settings.getAsBoolean(PRESERVE_ORIGINAL.getPreferredName(), DEFAULT_PRESERVE_ORIGINAL);
CharArraySet stopWords = Analysis.parseStopWords(env, settings, DEFAULT_STOP_WORDS); CharArraySet stopWords = Analysis.parseStopWords(env, settings, DEFAULT_STOP_WORDS);
this.analyzer = new FingerprintAnalyzer(stopWords, separator, maxOutputSize, preserveOriginal); this.analyzer = new FingerprintAnalyzer(stopWords, separator, maxOutputSize);
} }
@Override @Override

View File

@ -24,7 +24,6 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache; import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
@ -36,14 +35,11 @@ public class IndexCache extends AbstractIndexComponent implements Closeable {
private final QueryCache queryCache; private final QueryCache queryCache;
private final BitsetFilterCache bitsetFilterCache; private final BitsetFilterCache bitsetFilterCache;
private final PercolatorQueryCache percolatorQueryCache;
public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache, public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache) {
PercolatorQueryCache percolatorQueryCache) {
super(indexSettings); super(indexSettings);
this.queryCache = queryCache; this.queryCache = queryCache;
this.bitsetFilterCache = bitsetFilterCache; this.bitsetFilterCache = bitsetFilterCache;
this.percolatorQueryCache = percolatorQueryCache;
} }
public QueryCache query() { public QueryCache query() {
@ -57,13 +53,9 @@ public class IndexCache extends AbstractIndexComponent implements Closeable {
return bitsetFilterCache; return bitsetFilterCache;
} }
public PercolatorQueryCache getPercolatorQueryCache() {
return percolatorQueryCache;
}
@Override @Override
public void close() throws IOException { public void close() throws IOException {
IOUtils.close(queryCache, bitsetFilterCache, percolatorQueryCache); IOUtils.close(queryCache, bitsetFilterCache);
} }
public void clear(String reason) { public void clear(String reason) {

View File

@ -23,11 +23,13 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
@ -36,6 +38,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper; import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.KeywordFieldMapper; import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
import org.elasticsearch.index.query.PercolateQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
@ -50,14 +53,17 @@ import java.util.Map;
public class PercolatorFieldMapper extends FieldMapper { public class PercolatorFieldMapper extends FieldMapper {
public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE;
public final static Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING =
Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope);
@Deprecated @Deprecated
public static final String LEGACY_TYPE_NAME = ".percolator"; public static final String LEGACY_TYPE_NAME = ".percolator";
public static final String CONTENT_TYPE = "percolator"; public static final String CONTENT_TYPE = "percolator";
private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType();
private static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms";
private static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query"; public static final String UNKNOWN_QUERY_FIELD_NAME = "unknown_query";
static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; public static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field";
public static class Builder extends FieldMapper.Builder<Builder, PercolatorFieldMapper> { public static class Builder extends FieldMapper.Builder<Builder, PercolatorFieldMapper> {
@ -172,7 +178,7 @@ public class PercolatorFieldMapper extends FieldMapper {
this.queryTermsField = queryTermsField; this.queryTermsField = queryTermsField;
this.unknownQueryField = unknownQueryField; this.unknownQueryField = unknownQueryField;
this.queryBuilderField = queryBuilderField; this.queryBuilderField = queryBuilderField;
this.mapUnmappedFieldAsString = PercolatorQueryCache.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); this.mapUnmappedFieldAsString = INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings);
} }
@Override @Override
@ -196,7 +202,7 @@ public class PercolatorFieldMapper extends FieldMapper {
// Fetching of terms, shapes and indexed scripts happen during this rewrite: // Fetching of terms, shapes and indexed scripts happen during this rewrite:
queryBuilder = queryBuilder.rewrite(queryShardContext); queryBuilder = queryBuilder.rewrite(queryShardContext);
try (XContentBuilder builder = XContentFactory.contentBuilder(PercolatorQueryCache.QUERY_BUILDER_CONTENT_TYPE)) { try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
builder.flush(); builder.flush();
byte[] queryBuilderAsBytes = builder.bytes().toBytes(); byte[] queryBuilderAsBytes = builder.bytes().toBytes();
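
With the cache gone, the mapper now owns both the SMILE content type and the index-scoped setting. For reference, an index-scoped boolean setting of this shape is declared and read as follows (a condensed sketch of the pattern above; the class name is hypothetical):

--------------------------------
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

final class IndexScopedSettingSketch {
    // same shape as INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING above
    static final Setting<Boolean> MAP_UNMAPPED_AS_STRING = Setting.boolSetting(
            "index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope);

    static boolean readFrom(Settings indexSettings) {
        // Setting#get applies the declared default when the key is absent
        return MAP_UNMAPPED_AS_STRING.get(indexSettings);
    }
}
--------------------------------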


@ -40,6 +40,7 @@ import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext; import org.elasticsearch.search.internal.SubSearchContext;
import java.io.IOException;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -69,8 +70,8 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
} }
List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves(); List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves();
PercolatorQueryCache queriesRegistry = context.percolatorQueryCache();
IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore();
LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0); LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0);
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
@ -78,9 +79,14 @@ public class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
createSubSearchContext(context, percolatorLeafReaderContext, percolateQuery.getDocumentSource()); createSubSearchContext(context, percolatorLeafReaderContext, percolateQuery.getDocumentSource());
for (InternalSearchHit hit : hits) { for (InternalSearchHit hit : hits) {
final Query query;
try {
LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs)); LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs));
int segmentDocId = hit.docId() - ctx.docBase; int segmentDocId = hit.docId() - ctx.docBase;
Query query = queriesRegistry.getQueries(ctx).getQuery(segmentDocId); query = queryStore.getQueries(ctx).getQuery(segmentDocId);
} catch (IOException e) {
throw new RuntimeException(e);
}
if (query != null) { if (query != null) {
subSearchContext.parsedQuery(new ParsedQuery(query)); subSearchContext.parsedQuery(new ParsedQuery(query));
hitContext.reset( hitContext.reset(
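
Pulling the fragments above together: each hit's query is now resolved through PercolateQuery.QueryStore, whose checked IOException has to be rethrown unchecked inside the fetch loop. A sketch of that lookup (class and method names hypothetical):

--------------------------------
import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.PercolateQuery;

final class PerHitQueryLookupSketch {
    static Query queryForHit(PercolateQuery.QueryStore store, List<LeafReaderContext> ctxs, int topLevelDocId) {
        try {
            LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(topLevelDocId, ctxs));
            return store.getQueries(ctx).getQuery(topLevelDocId - ctx.docBase); // may be null
        } catch (IOException e) {
            // fetch sub phases cannot propagate checked exceptions
            throw new RuntimeException(e);
        }
    }
}
--------------------------------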


@ -1,294 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.percolator;
import com.carrotsearch.hppc.IntObjectHashMap;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexWarmer;
import org.elasticsearch.index.IndexWarmer.TerminationHandle;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.query.PercolateQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.function.Supplier;
import static org.elasticsearch.index.percolator.PercolatorFieldMapper.LEGACY_TYPE_NAME;
import static org.elasticsearch.index.percolator.PercolatorFieldMapper.PercolatorFieldType;
import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery;
public final class PercolatorQueryCache extends AbstractIndexComponent
implements Closeable, LeafReader.CoreClosedListener, PercolateQuery.QueryRegistry {
public final static Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING =
Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope);
public final static XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE;
private final Supplier<QueryShardContext> queryShardContextSupplier;
private final Cache<Object, QueriesLeaf> cache;
private final boolean mapUnmappedFieldsAsString;
public PercolatorQueryCache(IndexSettings indexSettings, Supplier<QueryShardContext> queryShardContextSupplier) {
super(indexSettings);
this.queryShardContextSupplier = queryShardContextSupplier;
cache = CacheBuilder.<Object, QueriesLeaf>builder().build();
this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING);
}
@Override
public Leaf getQueries(LeafReaderContext ctx) {
QueriesLeaf percolatorQueries = cache.get(ctx.reader().getCoreCacheKey());
if (percolatorQueries == null) {
throw new IllegalStateException("queries not loaded, queries should be have been preloaded during index warming...");
}
return percolatorQueries;
}
public IndexWarmer.Listener createListener(ThreadPool threadPool) {
return new IndexWarmer.Listener() {
final Executor executor = threadPool.executor(ThreadPool.Names.WARMER);
@Override
public TerminationHandle warmReader(IndexShard indexShard, Searcher searcher) {
final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size());
for (final LeafReaderContext ctx : searcher.reader().leaves()) {
if (cache.get(ctx.reader().getCoreCacheKey()) != null) {
latch.countDown();
continue;
}
executor.execute(() -> {
try {
final long start = System.nanoTime();
QueriesLeaf queries = loadQueries(ctx, indexShard);
cache.put(ctx.reader().getCoreCacheKey(), queries);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace(
"loading percolator queries took [{}]",
TimeValue.timeValueNanos(System.nanoTime() - start)
);
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to load percolator queries", t);
} finally {
latch.countDown();
}
});
}
return () -> latch.await();
}
};
}
QueriesLeaf loadQueries(LeafReaderContext context, IndexShard indexShard) throws IOException {
Version indexVersionCreated = indexShard.indexSettings().getIndexVersionCreated();
MapperService mapperService = indexShard.mapperService();
LeafReader leafReader = context.reader();
ShardId shardId = ShardUtils.extractShardId(leafReader);
if (shardId == null) {
throw new IllegalStateException("can't resolve shard id");
}
if (indexSettings.getIndex().equals(shardId.getIndex()) == false) {
// percolator cache insanity
String message = "Trying to load queries for index " + shardId.getIndex() + " with cache of index " +
indexSettings.getIndex();
throw new IllegalStateException(message);
}
IntObjectHashMap<Query> queries = new IntObjectHashMap<>();
boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1);
if (legacyLoading) {
PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, LEGACY_TYPE_NAME), PostingsEnum.NONE);
if (postings != null) {
LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor();
for (int docId = postings.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = postings.nextDoc()) {
leafReader.document(docId, visitor);
queries.put(docId, parseLegacyPercolatorDocument(docId, visitor.source));
visitor.source = null; // reset
}
}
} else {
// Each type can have one percolator field mapper,
// So for each type we check if there is a percolator field mapper
// and parse all the queries for the documents of that type.
IndexSearcher indexSearcher = new IndexSearcher(leafReader);
for (DocumentMapper documentMapper : mapperService.docMappers(false)) {
Weight queryWeight = indexSearcher.createNormalizedWeight(documentMapper.typeFilter(), false);
for (FieldMapper fieldMapper : documentMapper.mappers()) {
if (fieldMapper instanceof PercolatorFieldMapper) {
PercolatorFieldType fieldType = (PercolatorFieldType) fieldMapper.fieldType();
BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName());
if (binaryDocValues != null) {
// use the same leaf reader context the indexSearcher is using too:
Scorer scorer = queryWeight.scorer(leafReader.getContext());
if (scorer != null) {
DocIdSetIterator iterator = scorer.iterator();
for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
BytesRef qbSource = binaryDocValues.get(docId);
if (qbSource.length > 0) {
queries.put(docId, parseQueryBuilder(docId, qbSource));
}
}
}
}
break;
}
}
}
}
leafReader.addCoreClosedListener(this);
return new QueriesLeaf(shardId, queries);
}
private Query parseQueryBuilder(int docId, BytesRef qbSource) {
XContent xContent = QUERY_BUILDER_CONTENT_TYPE.xContent();
try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) {
QueryShardContext context = queryShardContextSupplier.get();
return parseQuery(context, mapUnmappedFieldsAsString, sourceParser);
} catch (IOException e) {
throw new PercolatorException(index(), "failed to parse query builder for document [" + docId + "]", e);
}
}
private Query parseLegacyPercolatorDocument(int docId, BytesReference source) {
try (XContentParser sourceParser = XContentHelper.createParser(source)) {
String currentFieldName = null;
XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT");
}
while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = sourceParser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
QueryShardContext context = queryShardContextSupplier.get();
return parseQuery(context, mapUnmappedFieldsAsString, sourceParser);
} else {
sourceParser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
sourceParser.skipChildren();
}
}
} catch (Exception e) {
throw new PercolatorException(index(), "failed to parse query [" + docId + "]", e);
}
return null;
}
public PercolatorQueryCacheStats getStats(ShardId shardId) {
int numberOfQueries = 0;
for (QueriesLeaf queries : cache.values()) {
if (shardId.equals(queries.shardId)) {
numberOfQueries += queries.queries.size();
}
}
return new PercolatorQueryCacheStats(numberOfQueries);
}
@Override
public void onClose(Object cacheKey) throws IOException {
cache.invalidate(cacheKey);
}
@Override
public void close() throws IOException {
cache.invalidateAll();
}
final static class LegacyQueryFieldVisitor extends StoredFieldVisitor {
private BytesArray source;
@Override
public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException {
source = new BytesArray(bytes);
}
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
if (source != null) {
return Status.STOP;
}
if (SourceFieldMapper.NAME.equals(fieldInfo.name)) {
return Status.YES;
} else {
return Status.NO;
}
}
}
final static class QueriesLeaf implements Leaf {
final ShardId shardId;
final IntObjectHashMap<Query> queries;
QueriesLeaf(ShardId shardId, IntObjectHashMap<Query> queries) {
this.shardId = shardId;
this.queries = queries;
}
@Override
public Query getQuery(int docId) {
return queries.get(docId);
}
}
}


@ -1,89 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.percolator;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* Exposes percolator query cache statistics.
*/
public class PercolatorQueryCacheStats implements Streamable, ToXContent {
private long numQueries;
/**
* Noop constructor for serialization purposes.
*/
public PercolatorQueryCacheStats() {
}
PercolatorQueryCacheStats(long numQueries) {
this.numQueries = numQueries;
}
/**
* @return The total number of loaded percolate queries.
*/
public long getNumQueries() {
return numQueries;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.PERCOLATOR);
builder.field(Fields.QUERIES, getNumQueries());
builder.endObject();
return builder;
}
public void add(PercolatorQueryCacheStats percolate) {
if (percolate == null) {
return;
}
numQueries += percolate.getNumQueries();
}
static final class Fields {
static final String PERCOLATOR = "percolator";
static final String QUERIES = "num_queries";
}
public static PercolatorQueryCacheStats readPercolateStats(StreamInput in) throws IOException {
PercolatorQueryCacheStats stats = new PercolatorQueryCacheStats();
stats.readFrom(in);
return stats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
numQueries = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(numQueries);
}
}


@ -267,7 +267,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
*/ */
public MatchQueryBuilder prefixLength(int prefixLength) { public MatchQueryBuilder prefixLength(int prefixLength) {
if (prefixLength < 0 ) { if (prefixLength < 0 ) {
throw new IllegalArgumentException("No negative prefix length allowed."); throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative.");
} }
this.prefixLength = prefixLength; this.prefixLength = prefixLength;
return this; return this;
@ -284,8 +284,8 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
* When using fuzzy or prefix type query, the number of term expansions to use. * When using fuzzy or prefix type query, the number of term expansions to use.
*/ */
public MatchQueryBuilder maxExpansions(int maxExpansions) { public MatchQueryBuilder maxExpansions(int maxExpansions) {
if (maxExpansions < 0 ) { if (maxExpansions <= 0 ) {
throw new IllegalArgumentException("No negative maxExpansions allowed."); throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive.");
} }
this.maxExpansions = maxExpansions; this.maxExpansions = maxExpansions;
return this; return this;
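
Besides naming the query in the error messages, the maxExpansions check tightens from rejecting only negatives to rejecting zero as well. In use (field and text hypothetical):

--------------------------------
MatchQueryBuilder query = new MatchQueryBuilder("title", "quick fox");
query.prefixLength(0);    // accepted: zero stays legal for prefix length
query.maxExpansions(50);  // accepted: positive
query.maxExpansions(0);   // now rejected with IllegalArgumentException
--------------------------------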


@ -50,7 +50,7 @@ public final class PercolateQuery extends Query implements Accountable {
public static class Builder { public static class Builder {
private final String docType; private final String docType;
private final QueryRegistry queryRegistry; private final QueryStore queryStore;
private final BytesReference documentSource; private final BytesReference documentSource;
private final IndexSearcher percolatorIndexSearcher; private final IndexSearcher percolatorIndexSearcher;
@ -59,15 +59,15 @@ public final class PercolateQuery extends Query implements Accountable {
/** /**
* @param docType The type of the document being percolated * @param docType The type of the document being percolated
* @param queryRegistry The registry holding all the percolator queries as Lucene queries. * @param queryStore The lookup holding all the percolator queries as Lucene queries.
* @param documentSource The source of the document being percolated * @param documentSource The source of the document being percolated
* @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated
*/ */
public Builder(String docType, QueryRegistry queryRegistry, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { public Builder(String docType, QueryStore queryStore, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) {
this.docType = Objects.requireNonNull(docType); this.docType = Objects.requireNonNull(docType);
this.documentSource = Objects.requireNonNull(documentSource); this.documentSource = Objects.requireNonNull(documentSource);
this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher);
this.queryRegistry = Objects.requireNonNull(queryRegistry); this.queryStore = Objects.requireNonNull(queryStore);
} }
/** /**
@ -94,7 +94,6 @@ public final class PercolateQuery extends Query implements Accountable {
if (percolateTypeQuery != null && queriesMetaDataQuery != null) { if (percolateTypeQuery != null && queriesMetaDataQuery != null) {
throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata");
} }
// The query that selects which percolator queries will be evaluated by MemoryIndex: // The query that selects which percolator queries will be evaluated by MemoryIndex:
BooleanQuery.Builder builder = new BooleanQuery.Builder(); BooleanQuery.Builder builder = new BooleanQuery.Builder();
if (percolateTypeQuery != null) { if (percolateTypeQuery != null) {
@ -103,24 +102,23 @@ public final class PercolateQuery extends Query implements Accountable {
if (queriesMetaDataQuery != null) { if (queriesMetaDataQuery != null) {
builder.add(queriesMetaDataQuery, FILTER); builder.add(queriesMetaDataQuery, FILTER);
} }
return new PercolateQuery(docType, queryStore, documentSource, builder.build(), percolatorIndexSearcher);
return new PercolateQuery(docType, queryRegistry, documentSource, builder.build(), percolatorIndexSearcher);
} }
} }
private final String documentType; private final String documentType;
private final QueryRegistry queryRegistry; private final QueryStore queryStore;
private final BytesReference documentSource; private final BytesReference documentSource;
private final Query percolatorQueriesQuery; private final Query percolatorQueriesQuery;
private final IndexSearcher percolatorIndexSearcher; private final IndexSearcher percolatorIndexSearcher;
private PercolateQuery(String documentType, QueryRegistry queryRegistry, BytesReference documentSource, private PercolateQuery(String documentType, QueryStore queryStore, BytesReference documentSource,
Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) { Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher) {
this.documentType = documentType; this.documentType = documentType;
this.documentSource = documentSource; this.documentSource = documentSource;
this.percolatorQueriesQuery = percolatorQueriesQuery; this.percolatorQueriesQuery = percolatorQueriesQuery;
this.queryRegistry = queryRegistry; this.queryStore = queryStore;
this.percolatorIndexSearcher = percolatorIndexSearcher; this.percolatorIndexSearcher = percolatorIndexSearcher;
} }
@ -128,7 +126,7 @@ public final class PercolateQuery extends Query implements Accountable {
public Query rewrite(IndexReader reader) throws IOException { public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = percolatorQueriesQuery.rewrite(reader); Query rewritten = percolatorQueriesQuery.rewrite(reader);
if (rewritten != percolatorQueriesQuery) { if (rewritten != percolatorQueriesQuery) {
return new PercolateQuery(documentType, queryRegistry, documentSource, rewritten, percolatorIndexSearcher); return new PercolateQuery(documentType, queryStore, documentSource, rewritten, percolatorIndexSearcher);
} else { } else {
return this; return this;
} }
@ -151,7 +149,7 @@ public final class PercolateQuery extends Query implements Accountable {
if (result == docId) { if (result == docId) {
if (twoPhaseIterator.matches()) { if (twoPhaseIterator.matches()) {
if (needsScores) { if (needsScores) {
QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); QueryStore.Leaf percolatorQueries = queryStore.getQueries(leafReaderContext);
Query query = percolatorQueries.getQuery(docId); Query query = percolatorQueries.getQuery(docId);
Explanation detail = percolatorIndexSearcher.explain(query, 0); Explanation detail = percolatorIndexSearcher.explain(query, 0);
return Explanation.match(scorer.score(), "PercolateQuery", detail); return Explanation.match(scorer.score(), "PercolateQuery", detail);
@ -181,9 +179,9 @@ public final class PercolateQuery extends Query implements Accountable {
return null; return null;
} }
final QueryRegistry.Leaf percolatorQueries = queryRegistry.getQueries(leafReaderContext); final QueryStore.Leaf queries = queryStore.getQueries(leafReaderContext);
if (needsScores) { if (needsScores) {
return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {
float score; float score;
@ -209,7 +207,7 @@ public final class PercolateQuery extends Query implements Accountable {
} }
}; };
} else { } else {
return new BaseScorer(this, approximation, percolatorQueries, percolatorIndexSearcher) { return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {
@Override @Override
public float score() throws IOException { public float score() throws IOException {
@ -238,6 +236,10 @@ public final class PercolateQuery extends Query implements Accountable {
return documentSource; return documentSource;
} }
public QueryStore getQueryStore() {
return queryStore;
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
@ -276,13 +278,15 @@ public final class PercolateQuery extends Query implements Accountable {
return sizeInBytes; return sizeInBytes;
} }
public interface QueryRegistry { @FunctionalInterface
public interface QueryStore {
Leaf getQueries(LeafReaderContext ctx); Leaf getQueries(LeafReaderContext ctx) throws IOException;
@FunctionalInterface
interface Leaf { interface Leaf {
Query getQuery(int docId); Query getQuery(int docId) throws IOException;
} }
@ -291,10 +295,10 @@ public final class PercolateQuery extends Query implements Accountable {
static abstract class BaseScorer extends Scorer { static abstract class BaseScorer extends Scorer {
final Scorer approximation; final Scorer approximation;
final QueryRegistry.Leaf percolatorQueries; final QueryStore.Leaf percolatorQueries;
final IndexSearcher percolatorIndexSearcher; final IndexSearcher percolatorIndexSearcher;
BaseScorer(Weight weight, Scorer approximation, QueryRegistry.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) { BaseScorer(Weight weight, Scorer approximation, QueryStore.Leaf percolatorQueries, IndexSearcher percolatorIndexSearcher) {
super(weight); super(weight);
this.approximation = approximation; this.approximation = approximation;
this.percolatorQueries = percolatorQueries; this.percolatorQueries = percolatorQueries;
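
Because QueryStore and QueryStore.Leaf are now @FunctionalInterface types that may throw IOException, implementations reduce to nested lambdas. For instance, a fixed in-memory store such as a test might use (a sketch; the helper name is hypothetical):

--------------------------------
import java.util.Map;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.PercolateQuery;

final class FixedQueryStoreSketch {
    // outer lambda implements QueryStore, inner lambda implements QueryStore.Leaf
    static PercolateQuery.QueryStore of(Map<Integer, Query> queriesByDocId) {
        return leafReaderContext -> docId -> queriesByDocId.get(docId);
    }
}
--------------------------------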


@ -21,10 +21,13 @@ package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.SlowCompositeReaderWrapper; import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
@ -33,23 +36,27 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.FieldNameAnalyzer; import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperForType; import org.elasticsearch.index.mapper.DocumentMapperForType;
@ -57,15 +64,16 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.percolator.PercolatorFieldMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.index.mapper.SourceToParse.source;
import static org.elasticsearch.index.percolator.PercolatorFieldMapper.parseQuery;
public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> { public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
@ -388,16 +396,14 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
docSearcher.setQueryCache(null); docSearcher.setQueryCache(null);
} }
PercolatorQueryCache registry = context.getPercolatorQueryCache(); IndexSettings indexSettings = context.getIndexSettings();
if (registry == null) { boolean mapUnmappedFieldsAsString = indexSettings.getValue(PercolatorFieldMapper.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING);
throw new QueryShardException(context, "no percolator query registry"); return buildQuery(indexSettings.getIndexVersionCreated(), context, docSearcher, mapUnmappedFieldsAsString);
} }
PercolateQuery.Builder builder = new PercolateQuery.Builder( Query buildQuery(Version indexVersionCreated, QueryShardContext context, IndexSearcher docSearcher,
documentType, registry, document, docSearcher boolean mapUnmappedFieldsAsString) throws IOException {
); if (indexVersionCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
Settings indexSettings = registry.getIndexSettings().getSettings();
if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0_alpha1)) {
MappedFieldType fieldType = context.fieldMapper(field); MappedFieldType fieldType = context.fieldMapper(field);
if (fieldType == null) { if (fieldType == null) {
throw new QueryShardException(context, "field [" + field + "] does not exist"); throw new QueryShardException(context, "field [" + field + "] does not exist");
@ -408,13 +414,20 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
"] to be of type [percolator], but is of type [" + fieldType.typeName() + "]"); "] to be of type [percolator], but is of type [" + fieldType.typeName() + "]");
} }
PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType; PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType;
PercolateQuery.Builder builder = new PercolateQuery.Builder(
documentType, createStore(pft, context, mapUnmappedFieldsAsString), document, docSearcher
);
builder.extractQueryTermsQuery(pft.getExtractedTermsField(), pft.getUnknownQueryFieldName()); builder.extractQueryTermsQuery(pft.getExtractedTermsField(), pft.getUnknownQueryFieldName());
return builder.build();
} else { } else {
Query percolateTypeQuery = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME)); Query percolateTypeQuery = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.LEGACY_TYPE_NAME));
PercolateQuery.Builder builder = new PercolateQuery.Builder(
documentType, createLegacyStore(context, mapUnmappedFieldsAsString), document, docSearcher
);
builder.setPercolateTypeQuery(percolateTypeQuery); builder.setPercolateTypeQuery(percolateTypeQuery);
}
return builder.build(); return builder.build();
} }
}
public String getField() { public String getField() {
return field; return field;
@ -459,4 +472,91 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
} }
} }
private static PercolateQuery.QueryStore createStore(PercolatorFieldMapper.PercolatorFieldType fieldType,
QueryShardContext context,
boolean mapUnmappedFieldsAsString) {
return ctx -> {
LeafReader leafReader = ctx.reader();
BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName());
if (binaryDocValues == null) {
return docId -> null;
}
Bits bits = leafReader.getDocsWithField(fieldType.getQueryBuilderFieldName());
return docId -> {
if (bits.get(docId)) {
BytesRef qbSource = binaryDocValues.get(docId);
if (qbSource.length > 0) {
XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent();
try (XContentParser sourceParser = xContent.createParser(qbSource.bytes, qbSource.offset, qbSource.length)) {
return parseQuery(context, mapUnmappedFieldsAsString, sourceParser);
}
} else {
return null;
}
} else {
return null;
}
};
};
}
private static PercolateQuery.QueryStore createLegacyStore(QueryShardContext context, boolean mapUnmappedFieldsAsString) {
return ctx -> {
LeafReader leafReader = ctx.reader();
return docId -> {
LegacyQueryFieldVisitor visitor = new LegacyQueryFieldVisitor();
leafReader.document(docId, visitor);
if (visitor.source == null) {
throw new IllegalStateException("No source found for document with docid [" + docId + "]");
}
try (XContentParser sourceParser = XContentHelper.createParser(visitor.source)) {
String currentFieldName = null;
XContentParser.Token token = sourceParser.nextToken(); // move the START_OBJECT
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchException("failed to parse query [" + docId + "], not starting with OBJECT");
}
while ((token = sourceParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = sourceParser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
return parseQuery(context, mapUnmappedFieldsAsString, sourceParser);
} else {
sourceParser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
sourceParser.skipChildren();
}
}
}
return null;
};
};
}
private final static class LegacyQueryFieldVisitor extends StoredFieldVisitor {
private BytesArray source;
@Override
public void binaryField(FieldInfo fieldInfo, byte[] bytes) throws IOException {
source = new BytesArray(bytes);
}
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
if (source != null) {
return Status.STOP;
}
if (SourceFieldMapper.NAME.equals(fieldInfo.name)) {
return Status.YES;
} else {
return Status.NO;
}
}
}
} }
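
Two design points in the rewrite above are worth noting: the store is chosen once per query based on the index's creation version, and each Leaf parses a query lazily, so parsing cost is only paid for doc ids that are actually visited. A condensed sketch of the version gate, written as if it were a helper inside PercolateQueryBuilder (the helper itself is hypothetical):

--------------------------------
// hypothetical helper; createStore/createLegacyStore are the private
// factories added in this change
static PercolateQuery.QueryStore queryStoreFor(Version indexVersionCreated,
        PercolatorFieldMapper.PercolatorFieldType fieldType,
        QueryShardContext context, boolean mapUnmappedFieldsAsString) {
    if (indexVersionCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
        return createStore(fieldType, context, mapUnmappedFieldsAsString);   // doc values based
    } else {
        return createLegacyStore(context, mapUnmappedFieldsAsString);        // stored _source based
    }
}
--------------------------------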


@ -51,7 +51,6 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.TextFieldMapper; import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.percolator.PercolatorQueryCache;
import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -82,7 +81,6 @@ public class QueryShardContext extends QueryRewriteContext {
private final Map<String, Query> namedQueries = new HashMap<>(); private final Map<String, Query> namedQueries = new HashMap<>();
private final MapperQueryParser queryParser = new MapperQueryParser(this); private final MapperQueryParser queryParser = new MapperQueryParser(this);
private final IndicesQueriesRegistry indicesQueriesRegistry; private final IndicesQueriesRegistry indicesQueriesRegistry;
private final PercolatorQueryCache percolatorQueryCache;
private boolean allowUnmappedFields; private boolean allowUnmappedFields;
private boolean mapUnmappedFieldAsString; private boolean mapUnmappedFieldAsString;
private NestedScope nestedScope; private NestedScope nestedScope;
@ -90,7 +88,7 @@ public class QueryShardContext extends QueryRewriteContext {
public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService,
MapperService mapperService, SimilarityService similarityService, ScriptService scriptService, MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
final IndicesQueriesRegistry indicesQueriesRegistry, Client client, PercolatorQueryCache percolatorQueryCache, final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
IndexReader reader, ClusterState clusterState) { IndexReader reader, ClusterState clusterState) {
super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState); super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState);
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
@ -100,14 +98,13 @@ public class QueryShardContext extends QueryRewriteContext {
this.indexFieldDataService = indexFieldDataService; this.indexFieldDataService = indexFieldDataService;
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.indicesQueriesRegistry = indicesQueriesRegistry; this.indicesQueriesRegistry = indicesQueriesRegistry;
this.percolatorQueryCache = percolatorQueryCache;
this.nestedScope = new NestedScope(); this.nestedScope = new NestedScope();
} }
public QueryShardContext(QueryShardContext source) { public QueryShardContext(QueryShardContext source) {
this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client, source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client,
source.percolatorQueryCache, source.reader, source.clusterState); source.reader, source.clusterState);
this.types = source.getTypes(); this.types = source.getTypes();
} }
@ -123,10 +120,6 @@ public class QueryShardContext extends QueryRewriteContext {
return mapperService.analysisService(); return mapperService.analysisService();
} }
public PercolatorQueryCache getPercolatorQueryCache() {
return percolatorQueryCache;
}
public Similarity getSearchSimilarity() { public Similarity getSearchSimilarity() {
return similarityService != null ? similarityService.similarity(mapperService) : null; return similarityService != null ? similarityService.similarity(mapperService) : null;
} }


@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@ -64,7 +65,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
*/ */
public SpanNearQueryBuilder(SpanQueryBuilder initialClause, int slop) { public SpanNearQueryBuilder(SpanQueryBuilder initialClause, int slop) {
if (initialClause == null) { if (initialClause == null) {
throw new IllegalArgumentException("query must include at least one clause"); throw new IllegalArgumentException("[" + NAME + "] must include at least one clause");
} }
this.clauses.add(initialClause); this.clauses.add(initialClause);
this.slop = slop; this.slop = slop;
@ -96,9 +97,12 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
return this.slop; return this.slop;
} }
public SpanNearQueryBuilder clause(SpanQueryBuilder clause) { /**
* Add a span clause to the current list of clauses
*/
public SpanNearQueryBuilder addClause(SpanQueryBuilder clause) {
if (clause == null) { if (clause == null) {
throw new IllegalArgumentException("query clauses cannot be null"); throw new IllegalArgumentException("[" + NAME + "] clauses cannot be null");
} }
clauses.add(clause); clauses.add(clause);
return this; return this;
@ -108,7 +112,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
* @return the {@link SpanQueryBuilder} clauses that were set for this query * @return the {@link SpanQueryBuilder} clauses that were set for this query
*/ */
public List<SpanQueryBuilder> clauses() { public List<SpanQueryBuilder> clauses() {
return this.clauses; return Collections.unmodifiableList(this.clauses);
} }
/** /**
@ -198,7 +202,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder(clauses.get(0), slop); SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder(clauses.get(0), slop);
for (int i = 1; i < clauses.size(); i++) { for (int i = 1; i < clauses.size(); i++) {
queryBuilder.clause(clauses.get(i)); queryBuilder.addClause(clauses.get(i));
} }
queryBuilder.inOrder(inOrder); queryBuilder.inOrder(inOrder);
queryBuilder.boost(boost); queryBuilder.boost(boost);
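
With the rename, clause-by-clause construction now reads (field and terms hypothetical):

--------------------------------
SpanNearQueryBuilder near = new SpanNearQueryBuilder(
        new SpanTermQueryBuilder("body", "quick"), 2);   // initial clause, slop = 2
near.addClause(new SpanTermQueryBuilder("body", "brown"))
    .addClause(new SpanTermQueryBuilder("body", "fox"))
    .inOrder(true);
--------------------------------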


@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@ -48,7 +49,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { public SpanOrQueryBuilder(SpanQueryBuilder initialClause) {
if (initialClause == null) { if (initialClause == null) {
throw new IllegalArgumentException("query must include at least one clause"); throw new IllegalArgumentException("[" + NAME + "] must include at least one clause");
} }
clauses.add(initialClause); clauses.add(initialClause);
} }
@ -68,9 +69,12 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
writeQueries(out, clauses); writeQueries(out, clauses);
} }
public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { /**
* Add a span clause to the current list of clauses
*/
public SpanOrQueryBuilder addClause(SpanQueryBuilder clause) {
if (clause == null) { if (clause == null) {
throw new IllegalArgumentException("inner bool query clause cannot be null"); throw new IllegalArgumentException("[" + NAME + "] inner clause cannot be null");
} }
clauses.add(clause); clauses.add(clause);
return this; return this;
@ -80,7 +84,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
* @return the {@link SpanQueryBuilder} clauses that were set for this query * @return the {@link SpanQueryBuilder} clauses that were set for this query
*/ */
public List<SpanQueryBuilder> clauses() { public List<SpanQueryBuilder> clauses() {
return this.clauses; return Collections.unmodifiableList(this.clauses);
} }
@Override @Override
@ -137,7 +141,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder<SpanOrQueryBuilder>
SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(clauses.get(0)); SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder(clauses.get(0));
for (int i = 1; i < clauses.size(); i++) { for (int i = 1; i < clauses.size(); i++) {
queryBuilder.clause(clauses.get(i)); queryBuilder.addClause(clauses.get(i));
} }
queryBuilder.boost(boost); queryBuilder.boost(boost);
queryBuilder.queryName(queryName); queryBuilder.queryName(queryName);
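
The same rename applies here, and clauses() now returns an unmodifiable view, so additions must go through addClause (field and terms hypothetical):

--------------------------------
SpanOrQueryBuilder or = new SpanOrQueryBuilder(new SpanTermQueryBuilder("body", "fast"));
or.addClause(new SpanTermQueryBuilder("body", "quick"));      // supported
or.clauses().add(new SpanTermQueryBuilder("body", "speedy")); // throws UnsupportedOperationException
--------------------------------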


@ -576,16 +576,22 @@ public class IndexShard extends AbstractIndexShardComponent {
long bytes = getEngine().getIndexBufferRAMBytesUsed(); long bytes = getEngine().getIndexBufferRAMBytesUsed();
writingBytes.addAndGet(bytes); writingBytes.addAndGet(bytes);
try { try {
logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); if (logger.isTraceEnabled()) {
logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes));
}
long time = System.nanoTime(); long time = System.nanoTime();
getEngine().refresh(source); getEngine().refresh(source);
refreshMetric.inc(System.nanoTime() - time); refreshMetric.inc(System.nanoTime() - time);
} finally { } finally {
logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); if (logger.isTraceEnabled()) {
logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
}
writingBytes.addAndGet(-bytes); writingBytes.addAndGet(-bytes);
} }
} else { } else {
logger.debug("refresh with source [{}]", source); if (logger.isTraceEnabled()) {
logger.trace("refresh with source [{}]", source);
}
long time = System.nanoTime(); long time = System.nanoTime();
getEngine().refresh(source); getEngine().refresh(source);
refreshMetric.inc(System.nanoTime() - time); refreshMetric.inc(System.nanoTime() - time);
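
refresh() is a hot path, so the messages are demoted from DEBUG to TRACE and guarded; the ByteSizeValue allocation and varargs boxing now only happen when TRACE is enabled. The guard pattern in isolation:

--------------------------------
// evaluate expensive log arguments only when the level is actually on
if (logger.isTraceEnabled()) {
    logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]",
            source, new ByteSizeValue(bytes));
}
--------------------------------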


@ -60,7 +60,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
private volatile long totalOffset; private volatile long totalOffset;
protected final AtomicBoolean closed = new AtomicBoolean(false); protected final AtomicBoolean closed = new AtomicBoolean(false);
// lock order synchronized(syncLock) -> synchronized(this)
private final Object syncLock = new Object();
public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException { public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
super(generation, channel, path, channel.position()); super(generation, channel, path, channel.position());
@ -146,29 +147,15 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
* raising the exception. * raising the exception.
*/ */
public void sync() throws IOException { public void sync() throws IOException {
if (syncNeeded()) { syncUpTo(Long.MAX_VALUE);
synchronized (this) {
ensureOpen();
final long offsetToSync;
final int opsCounter;
try {
outputStream.flush();
offsetToSync = totalOffset;
opsCounter = operationCounter;
checkpoint(offsetToSync, opsCounter, generation, channel, path);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
lastSyncedOffset = offsetToSync;
}
}
} }
/** /**
* returns true if there are buffered ops * returns true if there are buffered ops
*/ */
public boolean syncNeeded() { return totalOffset != lastSyncedOffset; } public boolean syncNeeded() {
return totalOffset != lastSyncedOffset;
}
@Override @Override
public int totalOperations() { public int totalOperations() {
@ -183,7 +170,14 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
/** /**
* closes this writer and transfers it's underlying file channel to a new immutable reader * closes this writer and transfers it's underlying file channel to a new immutable reader
*/ */
public synchronized TranslogReader closeIntoReader() throws IOException { public TranslogReader closeIntoReader() throws IOException {
// make sure to acquire the sync lock first, to prevent deadlocks with threads calling
// syncUpTo(), where the sync lock is acquired first, followed by the synchronized(this)
//
// Note: while this is not strictly needed, as this method is called while all ops on the translog are blocked,
// we do this for correctness and to prevent future issues.
synchronized (syncLock) {
synchronized (this) {
try { try {
sync(); // sync before we close.. sync(); // sync before we close..
} catch (IOException e) { } catch (IOException e) {
@ -206,10 +200,16 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy); throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy);
} }
} }
}
}
@Override @Override
public synchronized Translog.Snapshot newSnapshot() { public Translog.Snapshot newSnapshot() {
// make sure to acquire the sync lock first, to prevent deadlocks with threads calling
// syncUpTo(), where the sync lock is acquired first, followed by the synchronized(this)
synchronized (syncLock) {
synchronized (this) {
ensureOpen(); ensureOpen();
try { try {
sync(); sync();
@ -218,6 +218,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
} }
return super.newSnapshot(); return super.newSnapshot();
} }
}
}
private long getWrittenOffset() throws IOException { private long getWrittenOffset() throws IOException {
return channel.position(); return channel.position();
@ -229,10 +231,39 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
* @return <code>true</code> if this call caused an actual sync operation * @return <code>true</code> if this call caused an actual sync operation
*/ */
public boolean syncUpTo(long offset) throws IOException { public boolean syncUpTo(long offset) throws IOException {
if (lastSyncedOffset < offset) { if (lastSyncedOffset < offset && syncNeeded()) {
sync(); synchronized (syncLock) { // only one sync/checkpoint should happen concurrently but we wait
if (lastSyncedOffset < offset && syncNeeded()) {
// double checked locking - we don't want to fsync unless we have to and now that we have
// the lock we should check again since if this code is busy we might have fsynced enough already
final long offsetToSync;
final int opsCounter;
synchronized (this) {
ensureOpen();
try {
outputStream.flush();
offsetToSync = totalOffset;
opsCounter = operationCounter;
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
}
// now do the actual fsync outside of the synchronized block such that
// we can continue writing to the buffer etc.
try {
channel.force(false);
writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE);
} catch (Throwable ex) {
closeWithTragicEvent(ex);
throw ex;
}
assert lastSyncedOffset <= offsetToSync : "illegal state: " + lastSyncedOffset + " <= " + offsetToSync;
lastSyncedOffset = offsetToSync; // write protected by syncLock
return true; return true;
} }
}
}
return false; return false;
} }
@ -254,11 +285,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
} }
private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException {
translogFileChannel.force(false);
writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE);
}
private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException { private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException {
final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME); final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME);
Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation); Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation);
@ -269,7 +295,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
static final ChannelFactory DEFAULT = new ChannelFactory(); static final ChannelFactory DEFAULT = new ChannelFactory();
// only for testing until we have a disk-full FileSystemt // only for testing until we have a disk-full FileSystem
public FileChannel open(Path file) throws IOException { public FileChannel open(Path file) throws IOException {
return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
} }

@ -280,7 +280,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
if (indexShard.routingEntry() == null) { if (indexShard.routingEntry() == null) {
continue; continue;
} }
IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats()) }); IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesQueryCache, indexShard, flags), indexShard.commitStats()) });
if (!statsByShard.containsKey(indexService.index())) { if (!statsByShard.containsKey(indexService.index())) {
statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats)); statsByShard.put(indexService.index(), arrayAsArrayList(indexShardStats));
} else { } else {

@ -37,7 +37,6 @@ import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.shard.IndexingStats;
import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.recovery.RecoveryStats;
import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.SearchStats;
@ -102,11 +101,6 @@ public class NodeIndicesStats implements Streamable, ToXContent {
return stats.getSearch(); return stats.getSearch();
} }
@Nullable
public PercolatorQueryCacheStats getPercolate() {
return stats.getPercolatorCache();
}
@Nullable @Nullable
public MergeStats getMerge() { public MergeStats getMerge() {
return stats.getMerge(); return stats.getMerge();

@ -19,14 +19,11 @@
package org.elasticsearch.node.internal; package org.elasticsearch.node.internal;
import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
@ -39,10 +36,13 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import static org.elasticsearch.common.Strings.cleanPath; import static org.elasticsearch.common.Strings.cleanPath;
@ -52,20 +52,18 @@ import static org.elasticsearch.common.Strings.cleanPath;
public class InternalSettingsPreparer { public class InternalSettingsPreparer {
private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"}; private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"};
static final String PROPERTY_PREFIX = "es."; static final String PROPERTY_DEFAULTS_PREFIX = "default.";
static final String PROPERTY_DEFAULTS_PREFIX = "es.default."; static final Predicate<String> PROPERTY_DEFAULTS_PREDICATE = key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX);
public static final String SECRET_PROMPT_VALUE = "${prompt.secret}"; public static final String SECRET_PROMPT_VALUE = "${prompt.secret}";
public static final String TEXT_PROMPT_VALUE = "${prompt.text}"; public static final String TEXT_PROMPT_VALUE = "${prompt.text}";
public static final Setting<Boolean> IGNORE_SYSTEM_PROPERTIES_SETTING =
Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope);
/** /**
* Prepares the settings by gathering all elasticsearch system properties and setting defaults. * Prepares the settings by gathering all elasticsearch system properties and setting defaults.
*/ */
public static Settings prepareSettings(Settings input) { public static Settings prepareSettings(Settings input) {
Settings.Builder output = Settings.builder(); Settings.Builder output = Settings.builder();
initializeSettings(output, input, true); initializeSettings(output, input, true, Collections.emptyMap());
finalizeSettings(output, null, null); finalizeSettings(output, null, null);
return output.build(); return output.build();
} }
@ -80,9 +78,23 @@ public class InternalSettingsPreparer {
* @return the initialized {@link Environment} * @return the initialized {@link Environment}
*/ */
public static Environment prepareEnvironment(Settings input, Terminal terminal) { public static Environment prepareEnvironment(Settings input, Terminal terminal) {
return prepareEnvironment(input, terminal, Collections.emptyMap());
}
/**
* Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings,
* and then replacing all property placeholders. If a {@link Terminal} is provided and configuration settings are loaded,
* settings with a value of <code>${prompt.text}</code> or <code>${prompt.secret}</code> will result in a prompt for
* the setting to the user.
* @param input The custom settings to use. These are not overwritten by settings in the configuration file.
* @param terminal the Terminal to use for input/output
* @param properties Map of properties key/value pairs (usually from the command-line)
* @return the initialized {@link Environment}
*/
public static Environment prepareEnvironment(Settings input, Terminal terminal, Map<String, String> properties) {
// just create enough settings to build the environment, to get the config dir // just create enough settings to build the environment, to get the config dir
Settings.Builder output = Settings.builder(); Settings.Builder output = Settings.builder();
initializeSettings(output, input, true); initializeSettings(output, input, true, properties);
Environment environment = new Environment(output.build()); Environment environment = new Environment(output.build());
boolean settingsFileFound = false; boolean settingsFileFound = false;
@ -103,7 +115,7 @@ public class InternalSettingsPreparer {
// re-initialize settings now that the config file has been loaded // re-initialize settings now that the config file has been loaded
// TODO: only re-initialize if a config file was actually loaded // TODO: only re-initialize if a config file was actually loaded
initializeSettings(output, input, false); initializeSettings(output, input, false, properties);
finalizeSettings(output, terminal, environment.configFile()); finalizeSettings(output, terminal, environment.configFile());
environment = new Environment(output.build()); environment = new Environment(output.build());
@ -113,22 +125,16 @@ public class InternalSettingsPreparer {
return new Environment(output.build()); return new Environment(output.build());
} }
private static boolean useSystemProperties(Settings input) {
return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input);
}
/** /**
* Initializes the builder with the given input settings, and loads system properties settings if allowed. * Initializes the builder with the given input settings, and loads system properties settings if allowed.
* If loadDefaults is true, system property default settings are loaded. * If loadDefaults is true, system property default settings are loaded.
*/ */
private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults) { private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults, Map<String, String> esSettings) {
output.put(input); output.put(input);
if (useSystemProperties(input)) {
if (loadDefaults) { if (loadDefaults) {
output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties()); output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE, key -> key.substring(PROPERTY_DEFAULTS_PREFIX.length()));
}
output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX);
} }
output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE.negate(), Function.identity());
output.replacePropertyPlaceholders(); output.replacePropertyPlaceholders();
} }
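The new initializeSettings() leans on a Predicate plus a key-mapping Function to apply "default."-prefixed properties first (with the prefix stripped) and explicit properties on top, so an explicit value always wins over a default. A standalone sketch of that merge using only the JDK (a hypothetical helper, not the Settings.Builder API):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;

class PropertySplitSketch {
    static final String DEFAULTS_PREFIX = "default.";
    static final Predicate<String> IS_DEFAULT = key -> key.startsWith(DEFAULTS_PREFIX);

    static Map<String, String> merge(Map<String, String> cliProperties) {
        Map<String, String> output = new HashMap<>();
        putMatching(output, cliProperties, IS_DEFAULT,
                key -> key.substring(DEFAULTS_PREFIX.length())); // defaults first, prefix stripped
        putMatching(output, cliProperties, IS_DEFAULT.negate(),
                Function.identity());                            // explicit values applied on top
        return output;
    }

    private static void putMatching(Map<String, String> out, Map<String, String> in,
                                    Predicate<String> keyFilter, Function<String, String> keyMapper) {
        for (Map.Entry<String, String> e : in.entrySet()) {
            if (keyFilter.test(e.getKey())) {
                out.put(keyMapper.apply(e.getKey()), e.getValue());
            }
        }
    }
}

For example, merging {"default.cluster.name": "dev", "cluster.name": "prod"} yields cluster.name=prod, because the second pass overwrites the stripped default.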

@ -27,11 +27,14 @@ import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell; import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.cli.Command; import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserError; import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
@ -56,6 +59,7 @@ import java.util.HashSet;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
@ -95,7 +99,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
* elasticsearch config directory, using the name of the plugin. If any files to be installed * elasticsearch config directory, using the name of the plugin. If any files to be installed
* already exist, they will be skipped. * already exist, they will be skipped.
*/ */
class InstallPluginCommand extends Command { class InstallPluginCommand extends SettingCommand {
private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging"; private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";
@ -126,12 +130,12 @@ class InstallPluginCommand extends Command {
"mapper-murmur3", "mapper-murmur3",
"mapper-size", "mapper-size",
"repository-azure", "repository-azure",
"repository-gcs",
"repository-hdfs", "repository-hdfs",
"repository-s3", "repository-s3",
"store-smb", "store-smb",
"x-pack"))); "x-pack")));
private final Environment env;
private final OptionSpec<Void> batchOption; private final OptionSpec<Void> batchOption;
private final OptionSpec<String> arguments; private final OptionSpec<String> arguments;
@ -159,9 +163,8 @@ class InstallPluginCommand extends Command {
FILE_PERMS = Collections.unmodifiableSet(filePerms); FILE_PERMS = Collections.unmodifiableSet(filePerms);
} }
InstallPluginCommand(Environment env) { InstallPluginCommand() {
super("Install a plugin"); super("Install a plugin");
this.env = env;
this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"), this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"),
"Enable batch mode explicitly, automatic confirmation of security permission"); "Enable batch mode explicitly, automatic confirmation of security permission");
this.arguments = parser.nonOptions("plugin id"); this.arguments = parser.nonOptions("plugin id");
@ -177,7 +180,7 @@ class InstallPluginCommand extends Command {
} }
@Override @Override
protected void execute(Terminal terminal, OptionSet options) throws Exception { protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
// TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
List<String> args = arguments.values(options); List<String> args = arguments.values(options);
if (args.size() != 1) { if (args.size() != 1) {
@ -185,12 +188,12 @@ class InstallPluginCommand extends Command {
} }
String pluginId = args.get(0); String pluginId = args.get(0);
boolean isBatch = options.has(batchOption) || System.console() == null; boolean isBatch = options.has(batchOption) || System.console() == null;
execute(terminal, pluginId, isBatch); execute(terminal, pluginId, isBatch, settings);
} }
// pkg private for testing // pkg private for testing
void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception { void execute(Terminal terminal, String pluginId, boolean isBatch, Map<String, String> settings) throws Exception {
final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
// TODO: remove this leniency!! is it needed anymore? // TODO: remove this leniency!! is it needed anymore?
if (Files.exists(env.pluginsFile()) == false) { if (Files.exists(env.pluginsFile()) == false) {
terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating..."); terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating...");
@ -199,7 +202,7 @@ class InstallPluginCommand extends Command {
Path pluginZip = download(terminal, pluginId, env.tmpFile()); Path pluginZip = download(terminal, pluginId, env.tmpFile());
Path extractedZip = unzip(pluginZip, env.pluginsFile()); Path extractedZip = unzip(pluginZip, env.pluginsFile());
install(terminal, isBatch, extractedZip); install(terminal, isBatch, extractedZip, env);
} }
/** Downloads the plugin and returns the file it was downloaded to. */ /** Downloads the plugin and returns the file it was downloaded to. */
@ -348,7 +351,7 @@ class InstallPluginCommand extends Command {
} }
/** Load information about the plugin, and verify it can be installed with no errors. */ /** Load information about the plugin, and verify it can be installed with no errors. */
private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception { private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception {
// read and validate the plugin descriptor // read and validate the plugin descriptor
PluginInfo info = PluginInfo.readFromProperties(pluginRoot); PluginInfo info = PluginInfo.readFromProperties(pluginRoot);
terminal.println(VERBOSE, info.toString()); terminal.println(VERBOSE, info.toString());
@ -397,12 +400,12 @@ class InstallPluginCommand extends Command {
* Installs the plugin from {@code tmpRoot} into the plugins dir. * Installs the plugin from {@code tmpRoot} into the plugins dir.
* If the plugin has a bin dir and/or a config dir, those are copied. * If the plugin has a bin dir and/or a config dir, those are copied.
*/ */
private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception { private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env) throws Exception {
List<Path> deleteOnFailure = new ArrayList<>(); List<Path> deleteOnFailure = new ArrayList<>();
deleteOnFailure.add(tmpRoot); deleteOnFailure.add(tmpRoot);
try { try {
PluginInfo info = verify(terminal, tmpRoot, isBatch); PluginInfo info = verify(terminal, tmpRoot, isBatch, env);
final Path destination = env.pluginsFile().resolve(info.getName()); final Path destination = env.pluginsFile().resolve(info.getName());
if (Files.exists(destination)) { if (Files.exists(destination)) {

@ -19,6 +19,13 @@
package org.elasticsearch.plugins; package org.elasticsearch.plugins;
import joptsimple.OptionSet;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.io.IOException; import java.io.IOException;
import java.nio.file.DirectoryStream; import java.nio.file.DirectoryStream;
import java.nio.file.Files; import java.nio.file.Files;
@ -26,26 +33,20 @@ import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import joptsimple.OptionSet;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.env.Environment;
/** /**
* A command for the plugin cli to list plugins installed in elasticsearch. * A command for the plugin cli to list plugins installed in elasticsearch.
*/ */
class ListPluginsCommand extends Command { class ListPluginsCommand extends SettingCommand {
private final Environment env; ListPluginsCommand() {
ListPluginsCommand(Environment env) {
super("Lists installed elasticsearch plugins"); super("Lists installed elasticsearch plugins");
this.env = env;
} }
@Override @Override
protected void execute(Terminal terminal, OptionSet options) throws Exception { protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
if (Files.exists(env.pluginsFile()) == false) { if (Files.exists(env.pluginsFile()) == false) {
throw new IOException("Plugins directory missing: " + env.pluginsFile()); throw new IOException("Plugins directory missing: " + env.pluginsFile());
} }
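The same refactor pattern repeats across the plugin commands: each now extends SettingCommand and builds its Environment per invocation from the parsed -E settings, instead of receiving a fixed Environment in the constructor. A hedged sketch of a command following that shape (PrintPluginsDirCommand is hypothetical; the SettingCommand signature and prepareEnvironment call mirror the diff above):

import java.util.Map;

import joptsimple.OptionSet;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

class PrintPluginsDirCommand extends SettingCommand {

    PrintPluginsDirCommand() {
        super("Prints the resolved plugins directory");
    }

    @Override
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
        // resolve paths lazily from the -E settings (e.g. path.home, path.conf)
        final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
        terminal.println("plugins dir: " + env.pluginsFile());
    }
}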

@ -26,21 +26,24 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.node.internal.InternalSettingsPreparer;
import java.util.Collections;
/** /**
* A cli tool for adding, removing and listing plugins for elasticsearch. * A cli tool for adding, removing and listing plugins for elasticsearch.
*/ */
public class PluginCli extends MultiCommand { public class PluginCli extends MultiCommand {
public PluginCli(Environment env) { public PluginCli() {
super("A tool for managing installed elasticsearch plugins"); super("A tool for managing installed elasticsearch plugins");
subcommands.put("list", new ListPluginsCommand(env)); subcommands.put("list", new ListPluginsCommand());
subcommands.put("install", new InstallPluginCommand(env)); subcommands.put("install", new InstallPluginCommand());
subcommands.put("remove", new RemovePluginCommand(env)); subcommands.put("remove", new RemovePluginCommand());
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
// initialize default for es.logger.level because we will not read the logging.yml // initialize default for es.logger.level because we will not read the logging.yml
String loggerLevel = System.getProperty("es.logger.level", "INFO"); String loggerLevel = System.getProperty("es.logger.level", "INFO");
String pathHome = System.getProperty("es.path.home");
// Set the appender for all potential log files to terminal so that other components that use the logger print out the // Set the appender for all potential log files to terminal so that other components that use the logger print out the
// same terminal. // same terminal.
// The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is
@ -48,12 +51,14 @@ public class PluginCli extends MultiCommand {
// is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs. // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs.
// Therefore we print to Terminal. // Therefore we print to Terminal.
Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder() Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder()
.put("path.home", pathHome)
.put("appender.terminal.type", "terminal") .put("appender.terminal.type", "terminal")
.put("rootLogger", "${es.logger.level}, terminal") .put("rootLogger", "${logger.level}, terminal")
.put("es.logger.level", loggerLevel) .put("logger.level", loggerLevel)
.build(), Terminal.DEFAULT); .build(), Terminal.DEFAULT);
LogConfigurator.configure(loggingEnvironment.settings(), false); LogConfigurator.configure(loggingEnvironment.settings(), false);
Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT);
exit(new PluginCli(env).main(args, Terminal.DEFAULT)); exit(new PluginCli().main(args, Terminal.DEFAULT));
} }
} }

@ -24,45 +24,49 @@ import java.nio.file.Path;
import java.nio.file.StandardCopyOption; import java.nio.file.StandardCopyOption;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
import joptsimple.OptionSet; import joptsimple.OptionSet;
import joptsimple.OptionSpec; import joptsimple.OptionSpec;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cli.Command; import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.UserError; import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE; import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;
/** /**
* A command for the plugin cli to remove a plugin from elasticsearch. * A command for the plugin cli to remove a plugin from elasticsearch.
*/ */
class RemovePluginCommand extends Command { class RemovePluginCommand extends SettingCommand {
private final Environment env;
private final OptionSpec<String> arguments; private final OptionSpec<String> arguments;
RemovePluginCommand(Environment env) { RemovePluginCommand() {
super("Removes a plugin from elasticsearch"); super("Removes a plugin from elasticsearch");
this.env = env;
this.arguments = parser.nonOptions("plugin name"); this.arguments = parser.nonOptions("plugin name");
} }
@Override @Override
protected void execute(Terminal terminal, OptionSet options) throws Exception { protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
// TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
List<String> args = arguments.values(options); List<String> args = arguments.values(options);
if (args.size() != 1) { if (args.size() != 1) {
throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument"); throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument");
} }
execute(terminal, args.get(0)); execute(terminal, args.get(0), settings);
} }
// pkg private for testing // pkg private for testing
void execute(Terminal terminal, String pluginName) throws Exception { void execute(Terminal terminal, String pluginName, Map<String, String> settings) throws Exception {
final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "..."); terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "...");
Path pluginDir = env.pluginsFile().resolve(pluginName); Path pluginDir = env.pluginsFile().resolve(pluginName);

@ -123,9 +123,9 @@ public class BytesRestResponse extends RestResponse {
params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request()); params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request());
} else { } else {
if (status.getStatus() < 500) { if (status.getStatus() < 500) {
SUPPRESSED_ERROR_LOGGER.debug("{} Params: {}", t, channel.request().path(), channel.request().params()); SUPPRESSED_ERROR_LOGGER.debug("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params());
} else { } else {
SUPPRESSED_ERROR_LOGGER.warn("{} Params: {}", t, channel.request().path(), channel.request().params()); SUPPRESSED_ERROR_LOGGER.warn("path: {}, params: {}", t, channel.request().rawPath(), channel.request().params());
} }
params = channel.request(); params = channel.request();
} }
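Note the argument order in these calls: the ESLogger API of this era takes the Throwable immediately after the format string, and the remaining arguments fill the {} placeholders in order. A hedged illustration (the logger acquisition via Loggers.getLogger is an assumption for the sketch, not necessarily how BytesRestResponse obtains its logger):

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;

class RestLoggingSketch {
    private static final ESLogger SUPPRESSED_ERROR_LOGGER = Loggers.getLogger(RestLoggingSketch.class);

    static void logSuppressed(Throwable t, String rawPath, Object params) {
        // t is logged with its stack trace; rawPath and params bind to the two {} placeholders
        SUPPRESSED_ERROR_LOGGER.debug("path: {}, params: {}", t, rawPath, params);
    }
}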

@ -64,6 +64,7 @@ public class RestClusterRerouteAction extends BaseRestHandler {
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception { public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception {
final ClusterRerouteRequest clusterRerouteRequest = Requests.clusterRerouteRequest(); final ClusterRerouteRequest clusterRerouteRequest = Requests.clusterRerouteRequest();
clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun())); clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun()));
clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed()));
clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain())); clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain()));
clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout())); clusterRerouteRequest.timeout(request.paramAsTime("timeout", clusterRerouteRequest.timeout()));
clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout())); clusterRerouteRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterRerouteRequest.masterNodeTimeout()));
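The new retry_failed flag maps straight onto the request object. A hedged usage sketch from the Java client side, assuming the request builder mirrors the request's setter, as is the convention for these builders (the REST form would be POST /_cluster/reroute?retry_failed=true):

import org.elasticsearch.client.Client;

class RerouteRetrySketch {
    static void retryFailedShards(Client client) {
        client.admin().cluster().prepareReroute()
                .setRetryFailed(true) // reset failed-allocation counters and retry those shards
                .get();
    }
}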

@ -78,7 +78,6 @@ public class RestIndicesStatsAction extends BaseRestHandler {
indicesStatsRequest.flush(metrics.contains("flush")); indicesStatsRequest.flush(metrics.contains("flush"));
indicesStatsRequest.warmer(metrics.contains("warmer")); indicesStatsRequest.warmer(metrics.contains("warmer"));
indicesStatsRequest.queryCache(metrics.contains("query_cache")); indicesStatsRequest.queryCache(metrics.contains("query_cache"));
indicesStatsRequest.percolate(metrics.contains("percolator_cache"));
indicesStatsRequest.segments(metrics.contains("segments")); indicesStatsRequest.segments(metrics.contains("segments"));
indicesStatsRequest.fieldData(metrics.contains("fielddata")); indicesStatsRequest.fieldData(metrics.contains("fielddata"));
indicesStatsRequest.completion(metrics.contains("completion")); indicesStatsRequest.completion(metrics.contains("completion"));

@ -222,9 +222,6 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("merges.total_time", "sibling:pri;alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges");
table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges"); table.addCell("pri.merges.total_time", "default:false;text-align:right;desc:time spent in merges");
table.addCell("percolate.queries", "sibling:pri;alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries");
table.addCell("pri.percolate.queries", "default:false;text-align:right;desc:number of registered percolation queries");
table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.total", "sibling:pri;alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes");
table.addCell("pri.refresh.total", "default:false;text-align:right;desc:total refreshes"); table.addCell("pri.refresh.total", "default:false;text-align:right;desc:total refreshes");
@ -424,9 +421,6 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalTime()); table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalTime());
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime());
table.addCell(indexStats == null ? null : indexStats.getTotal().getPercolatorCache().getNumQueries());
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getPercolatorCache().getNumQueries());
table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal()); table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal());
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRefresh().getTotal()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRefresh().getTotal());

@ -45,7 +45,6 @@ import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.refresh.RefreshStats;
import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.SearchStats;
import org.elasticsearch.index.shard.IndexingStats; import org.elasticsearch.index.shard.IndexingStats;
@ -185,8 +184,6 @@ public class RestNodesAction extends AbstractCatAction {
table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged");
table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges");
table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries");
table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes");
table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes");
@ -338,9 +335,6 @@ public class RestNodesAction extends AbstractCatAction {
table.addCell(mergeStats == null ? null : mergeStats.getTotalSize()); table.addCell(mergeStats == null ? null : mergeStats.getTotalSize());
table.addCell(mergeStats == null ? null : mergeStats.getTotalTime()); table.addCell(mergeStats == null ? null : mergeStats.getTotalTime());
PercolatorQueryCacheStats percolatorQueryCacheStats = indicesStats == null ? null : indicesStats.getPercolate();
table.addCell(percolatorQueryCacheStats == null ? null : percolatorQueryCacheStats.getNumQueries());
RefreshStats refreshStats = indicesStats == null ? null : indicesStats.getRefresh(); RefreshStats refreshStats = indicesStats == null ? null : indicesStats.getRefresh();
table.addCell(refreshStats == null ? null : refreshStats.getTotal()); table.addCell(refreshStats == null ? null : refreshStats.getTotal());
table.addCell(refreshStats == null ? null : refreshStats.getTotalTime()); table.addCell(refreshStats == null ? null : refreshStats.getTotalTime());

@ -139,8 +139,6 @@ public class RestShardsAction extends AbstractCatAction {
table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged"); table.addCell("merges.total_size", "alias:mts,mergesTotalSize;default:false;text-align:right;desc:size merged");
table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges"); table.addCell("merges.total_time", "alias:mtt,mergesTotalTime;default:false;text-align:right;desc:time spent in merges");
table.addCell("percolate.queries", "alias:pq,percolateQueries;default:false;text-align:right;desc:number of registered percolation queries");
table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes"); table.addCell("refresh.total", "alias:rto,refreshTotal;default:false;text-align:right;desc:total refreshes");
table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes"); table.addCell("refresh.time", "alias:rti,refreshTime;default:false;text-align:right;desc:time spent in refreshes");
@ -278,8 +276,6 @@ public class RestShardsAction extends AbstractCatAction {
table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalSize()); table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalSize());
table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalTime()); table.addCell(commonStats == null ? null : commonStats.getMerge().getTotalTime());
table.addCell(commonStats == null ? null : commonStats.getPercolatorCache().getNumQueries());
table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotal()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotal());
table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotalTime()); table.addCell(commonStats == null ? null : commonStats.getRefresh().getTotalTime());

@ -30,13 +30,13 @@ public class ScriptEngineRegistry {
private final Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices; private final Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices;
private final Map<String, Class<? extends ScriptEngineService>> registeredLanguages; private final Map<String, Class<? extends ScriptEngineService>> registeredLanguages;
private final Map<String, ScriptMode> defaultInlineScriptModes; private final Map<String, Boolean> defaultInlineScriptEnableds;
public ScriptEngineRegistry(Iterable<ScriptEngineRegistration> registrations) { public ScriptEngineRegistry(Iterable<ScriptEngineRegistration> registrations) {
Objects.requireNonNull(registrations); Objects.requireNonNull(registrations);
Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices = new HashMap<>(); Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices = new HashMap<>();
Map<String, Class<? extends ScriptEngineService>> registeredLanguages = new HashMap<>(); Map<String, Class<? extends ScriptEngineService>> registeredLanguages = new HashMap<>();
Map<String, ScriptMode> inlineScriptModes = new HashMap<>(); Map<String, Boolean> inlineScriptEnableds = new HashMap<>();
for (ScriptEngineRegistration registration : registrations) { for (ScriptEngineRegistration registration : registrations) {
String oldLanguage = registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(), String oldLanguage = registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(),
registration.getScriptEngineLanguage()); registration.getScriptEngineLanguage());
@ -51,12 +51,12 @@ public class ScriptEngineRegistry {
throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" + throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" +
scriptEngineServiceClazz.getCanonicalName() + "]"); scriptEngineServiceClazz.getCanonicalName() + "]");
} }
inlineScriptModes.put(language, registration.getDefaultInlineScriptMode()); inlineScriptEnableds.put(language, registration.getDefaultInlineScriptEnabled());
} }
this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices); this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices);
this.registeredLanguages = Collections.unmodifiableMap(registeredLanguages); this.registeredLanguages = Collections.unmodifiableMap(registeredLanguages);
this.defaultInlineScriptModes = Collections.unmodifiableMap(inlineScriptModes); this.defaultInlineScriptEnableds = Collections.unmodifiableMap(inlineScriptEnableds);
} }
Iterable<Class<? extends ScriptEngineService>> getRegisteredScriptEngineServices() { Iterable<Class<? extends ScriptEngineService>> getRegisteredScriptEngineServices() {
@ -72,27 +72,27 @@ public class ScriptEngineRegistry {
return registeredLanguages; return registeredLanguages;
} }
Map<String, ScriptMode> getDefaultInlineScriptModes() { Map<String, Boolean> getDefaultInlineScriptEnableds() {
return this.defaultInlineScriptModes; return this.defaultInlineScriptEnableds;
} }
public static class ScriptEngineRegistration { public static class ScriptEngineRegistration {
private final Class<? extends ScriptEngineService> scriptEngineService; private final Class<? extends ScriptEngineService> scriptEngineService;
private final String scriptEngineLanguage; private final String scriptEngineLanguage;
private final ScriptMode defaultInlineScriptMode; private final boolean defaultInlineScriptEnabled;
/** /**
* Register a script engine service with the default of inline scripts disabled * Register a script engine service with the default of inline scripts disabled
*/ */
public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage) { public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage) {
this(scriptEngineService, scriptEngineLanguage, ScriptMode.OFF); this(scriptEngineService, scriptEngineLanguage, false);
} }
/** /**
* Register a script engine service with the given default mode for inline scripts * Register a script engine service with the given default mode for inline scripts
*/ */
public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage, public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage,
ScriptMode defaultInlineScriptMode) { boolean defaultInlineScriptEnabled) {
Objects.requireNonNull(scriptEngineService); Objects.requireNonNull(scriptEngineService);
if (Strings.hasText(scriptEngineLanguage) == false) { if (Strings.hasText(scriptEngineLanguage) == false) {
throw new IllegalArgumentException("languages for script engine service [" + throw new IllegalArgumentException("languages for script engine service [" +
@ -100,7 +100,7 @@ public class ScriptEngineRegistry {
} }
this.scriptEngineService = scriptEngineService; this.scriptEngineService = scriptEngineService;
this.scriptEngineLanguage = scriptEngineLanguage; this.scriptEngineLanguage = scriptEngineLanguage;
this.defaultInlineScriptMode = defaultInlineScriptMode; this.defaultInlineScriptEnabled = defaultInlineScriptEnabled;
} }
Class<? extends ScriptEngineService> getScriptEngineService() { Class<? extends ScriptEngineService> getScriptEngineService() {
@ -111,8 +111,8 @@ public class ScriptEngineRegistry {
return scriptEngineLanguage; return scriptEngineLanguage;
} }
ScriptMode getDefaultInlineScriptMode() { boolean getDefaultInlineScriptEnabled() {
return defaultInlineScriptMode; return defaultInlineScriptEnabled;
} }
} }
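After this refactor a registration carries a plain boolean for the inline default instead of a ScriptMode. A hedged construction sketch (MyScriptEngineService is a hypothetical ScriptEngineService implementation; only the registration shape is the point):

import java.util.Collections;

class RegistrationSketch {
    static ScriptEngineRegistry build() {
        return new ScriptEngineRegistry(Collections.singletonList(
                new ScriptEngineRegistry.ScriptEngineRegistration(
                        MyScriptEngineService.class, // the engine implementation class
                        "mylang",                    // the language key used in script settings
                        true)));                     // inline scripts enabled by default
    }
}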

@ -1,64 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import java.util.HashMap;
import java.util.Map;
/**
* Mode for a specific script, used for script settings.
* Defines whether a certain script or category of scripts can be executed or not.
*/
public enum ScriptMode {
ON("true"),
OFF("false");
private final String mode;
ScriptMode(String mode) {
this.mode = mode;
}
private static final Map<String, ScriptMode> SCRIPT_MODES;
static {
SCRIPT_MODES = new HashMap<>();
for (ScriptMode scriptMode : ScriptMode.values()) {
SCRIPT_MODES.put(scriptMode.mode, scriptMode);
}
}
static ScriptMode parse(String input) {
ScriptMode scriptMode = SCRIPT_MODES.get(input);
if (scriptMode == null) {
throw new IllegalArgumentException("script mode [" + input + "] not supported");
}
return scriptMode;
}
public String getMode() {
return mode;
}
@Override
public String toString() {
return mode;
}
}

@ -29,22 +29,22 @@ import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
/** /**
* Holds the {@link org.elasticsearch.script.ScriptMode}s for each of the different scripting languages available, * Holds the boolean indicating the enabled mode for each of the different scripting languages available, each script source and each
* each script source and each scripted operation. * scripted operation.
*/ */
public class ScriptModes { public class ScriptModes {
private static final String SCRIPT_SETTINGS_PREFIX = "script"; private static final String SCRIPT_SETTINGS_PREFIX = "script";
private static final String ENGINE_SETTINGS_PREFIX = "script.engine"; private static final String ENGINE_SETTINGS_PREFIX = "script.engine";
final Map<String, ScriptMode> scriptModes; final Map<String, Boolean> scriptEnabled;
ScriptModes(ScriptSettings scriptSettings, Settings settings) { ScriptModes(ScriptSettings scriptSettings, Settings settings) {
HashMap<String, ScriptMode> scriptModes = new HashMap<>(); HashMap<String, Boolean> scriptModes = new HashMap<>();
for (Setting<ScriptMode> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) { for (Setting<Boolean> scriptModeSetting : scriptSettings.getScriptLanguageSettings()) {
scriptModes.put(scriptModeSetting.getKey(), scriptModeSetting.get(settings)); scriptModes.put(scriptModeSetting.getKey(), scriptModeSetting.get(settings));
} }
this.scriptModes = Collections.unmodifiableMap(scriptModes); this.scriptEnabled = Collections.unmodifiableMap(scriptModes);
} }
/** /**
@ -54,14 +54,14 @@ public class ScriptModes {
* @param lang the language that the script is written in * @param lang the language that the script is written in
* @param scriptType the type of the script * @param scriptType the type of the script
* @param scriptContext the operation that requires the execution of the script * @param scriptContext the operation that requires the execution of the script
* @return whether scripts are on or off * @return whether scripts are enabled (true) or disabled (false)
*/ */
public ScriptMode getScriptMode(String lang, ScriptType scriptType, ScriptContext scriptContext) { public boolean getScriptEnabled(String lang, ScriptType scriptType, ScriptContext scriptContext) {
//native scripts are always on as they are static by definition //native scripts are always enabled as they are static by definition
if (NativeScriptEngineService.NAME.equals(lang)) { if (NativeScriptEngineService.NAME.equals(lang)) {
return ScriptMode.ON; return true;
} }
ScriptMode scriptMode = scriptModes.get(getKey(lang, scriptType, scriptContext)); Boolean scriptMode = scriptEnabled.get(getKey(lang, scriptType, scriptContext));
if (scriptMode == null) { if (scriptMode == null) {
throw new IllegalArgumentException("script mode not found for lang [" + lang + "], script_type [" + scriptType + "], operation [" + scriptContext.getKey() + "]"); throw new IllegalArgumentException("script mode not found for lang [" + lang + "], script_type [" + scriptType + "], operation [" + scriptContext.getKey() + "]");
} }
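With ScriptMode gone, these settings parse as ordinary booleans. A hedged example of the kind of keys getScriptEnabled() resolves against, following the script.<type> and script.engine.<lang>.<type>.<context> schemes used in this class (the groovy/search key is illustrative):

import org.elasticsearch.common.settings.Settings;

class ScriptEnabledSettingsSketch {
    static Settings example() {
        return Settings.builder()
                .put("script.inline", false)                     // source-level default for inline scripts
                .put("script.engine.groovy.inline.search", true) // fine-grained override for one lang/context
                .build();
    }
}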
@ -87,10 +87,10 @@ public class ScriptModes {
@Override @Override
public String toString() { public String toString() {
//order settings by key before printing them out, for readability //order settings by key before printing them out, for readability
TreeMap<String, ScriptMode> scriptModesTreeMap = new TreeMap<>(); TreeMap<String, Boolean> scriptModesTreeMap = new TreeMap<>();
scriptModesTreeMap.putAll(scriptModes); scriptModesTreeMap.putAll(scriptEnabled);
StringBuilder stringBuilder = new StringBuilder(); StringBuilder stringBuilder = new StringBuilder();
for (Map.Entry<String, ScriptMode> stringScriptModeEntry : scriptModesTreeMap.entrySet()) { for (Map.Entry<String, Boolean> stringScriptModeEntry : scriptModesTreeMap.entrySet()) {
stringBuilder.append(stringScriptModeEntry.getKey()).append(": ").append(stringScriptModeEntry.getValue()).append("\n"); stringBuilder.append(stringScriptModeEntry.getKey()).append(": ").append(stringScriptModeEntry.getValue()).append("\n");
} }
return stringBuilder.toString(); return stringBuilder.toString();

@ -24,7 +24,6 @@ import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.script.ScriptMode;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
@ -42,7 +41,7 @@ public class ScriptModule extends AbstractModule {
{ {
scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class,
NativeScriptEngineService.NAME, ScriptMode.ON)); NativeScriptEngineService.NAME, true));
} }
private final Map<String, Class<? extends NativeScriptFactory>> scripts = new HashMap<>(); private final Map<String, Class<? extends NativeScriptFactory>> scripts = new HashMap<>();

@ -473,15 +473,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
if (scriptContextRegistry.isSupportedContext(scriptContext) == false) { if (scriptContextRegistry.isSupportedContext(scriptContext) == false) {
throw new IllegalArgumentException("script context [" + scriptContext.getKey() + "] not supported"); throw new IllegalArgumentException("script context [" + scriptContext.getKey() + "] not supported");
} }
ScriptMode mode = scriptModes.getScriptMode(lang, scriptType, scriptContext); return scriptModes.getScriptEnabled(lang, scriptType, scriptContext);
switch (mode) {
case ON:
return true;
case OFF:
return false;
default:
throw new IllegalArgumentException("script mode [" + mode + "] not supported");
}
} }
public ScriptStats stats() { public ScriptStats stats() {
@ -610,14 +602,14 @@ public class ScriptService extends AbstractComponent implements Closeable {
*/ */
public enum ScriptType { public enum ScriptType {
INLINE(0, "inline", "inline", ScriptMode.OFF), INLINE(0, "inline", "inline", false),
STORED(1, "id", "stored", ScriptMode.OFF), STORED(1, "id", "stored", false),
FILE(2, "file", "file", ScriptMode.ON); FILE(2, "file", "file", true);
private final int val; private final int val;
private final ParseField parseField; private final ParseField parseField;
private final String scriptType; private final String scriptType;
private final ScriptMode defaultScriptMode; private final boolean defaultScriptEnabled;
public static ScriptType readFrom(StreamInput in) throws IOException { public static ScriptType readFrom(StreamInput in) throws IOException {
int scriptTypeVal = in.readVInt(); int scriptTypeVal = in.readVInt();
@ -638,19 +630,19 @@ public class ScriptService extends AbstractComponent implements Closeable {
} }
} }
ScriptType(int val, String name, String scriptType, ScriptMode defaultScriptMode) { ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) {
this.val = val; this.val = val;
this.parseField = new ParseField(name); this.parseField = new ParseField(name);
this.scriptType = scriptType; this.scriptType = scriptType;
this.defaultScriptMode = defaultScriptMode; this.defaultScriptEnabled = defaultScriptEnabled;
} }
public ParseField getParseField() { public ParseField getParseField() {
return parseField; return parseField;
} }
public ScriptMode getDefaultScriptMode() { public boolean getDefaultScriptEnabled() {
return defaultScriptMode; return defaultScriptEnabled;
} }
public String getScriptType() { public String getScriptType() {

@ -37,29 +37,28 @@ public class ScriptSettings {
public final static String DEFAULT_LANG = "groovy"; public final static String DEFAULT_LANG = "groovy";
private final static Map<ScriptService.ScriptType, Setting<ScriptMode>> SCRIPT_TYPE_SETTING_MAP; private final static Map<ScriptService.ScriptType, Setting<Boolean>> SCRIPT_TYPE_SETTING_MAP;
     static {
-        Map<ScriptService.ScriptType, Setting<ScriptMode>> scriptTypeSettingMap = new HashMap<>();
+        Map<ScriptService.ScriptType, Setting<Boolean>> scriptTypeSettingMap = new HashMap<>();
         for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
-            scriptTypeSettingMap.put(scriptType, new Setting<>(
+            scriptTypeSettingMap.put(scriptType, Setting.boolSetting(
                 ScriptModes.sourceKey(scriptType),
-                scriptType.getDefaultScriptMode().getMode(),
-                ScriptMode::parse,
+                scriptType.getDefaultScriptEnabled(),
                 Property.NodeScope));
         }
         SCRIPT_TYPE_SETTING_MAP = Collections.unmodifiableMap(scriptTypeSettingMap);
     }
-    private final Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap;
-    private final List<Setting<ScriptMode>> scriptLanguageSettings;
+    private final Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap;
+    private final List<Setting<Boolean>> scriptLanguageSettings;
     private final Setting<String> defaultScriptLanguageSetting;
     public ScriptSettings(ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry) {
-        Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = contextSettings(scriptContextRegistry);
+        Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap = contextSettings(scriptContextRegistry);
         this.scriptContextSettingMap = Collections.unmodifiableMap(scriptContextSettingMap);
-        List<Setting<ScriptMode>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry);
+        List<Setting<Boolean>> scriptLanguageSettings = languageSettings(SCRIPT_TYPE_SETTING_MAP, scriptContextSettingMap, scriptEngineRegistry, scriptContextRegistry);
         this.scriptLanguageSettings = Collections.unmodifiableList(scriptLanguageSettings);
         this.defaultScriptLanguageSetting = new Setting<>("script.default_lang", DEFAULT_LANG, setting -> {
@@ -70,24 +69,20 @@ public class ScriptSettings {
         }, Property.NodeScope);
     }
-    private static Map<ScriptContext, Setting<ScriptMode>> contextSettings(ScriptContextRegistry scriptContextRegistry) {
-        Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap = new HashMap<>();
+    private static Map<ScriptContext, Setting<Boolean>> contextSettings(ScriptContextRegistry scriptContextRegistry) {
+        Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap = new HashMap<>();
         for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
-            scriptContextSettingMap.put(scriptContext, new Setting<>(
-                ScriptModes.operationKey(scriptContext),
-                ScriptMode.OFF.getMode(),
-                ScriptMode::parse,
-                Property.NodeScope
-            ));
+            scriptContextSettingMap.put(scriptContext,
+                Setting.boolSetting(ScriptModes.operationKey(scriptContext), false, Property.NodeScope));
         }
         return scriptContextSettingMap;
     }
-    private static List<Setting<ScriptMode>> languageSettings(Map<ScriptService.ScriptType, Setting<ScriptMode>> scriptTypeSettingMap,
-                                                              Map<ScriptContext, Setting<ScriptMode>> scriptContextSettingMap,
+    private static List<Setting<Boolean>> languageSettings(Map<ScriptService.ScriptType, Setting<Boolean>> scriptTypeSettingMap,
+                                                           Map<ScriptContext, Setting<Boolean>> scriptContextSettingMap,
                                                            ScriptEngineRegistry scriptEngineRegistry,
                                                            ScriptContextRegistry scriptContextRegistry) {
-        final List<Setting<ScriptMode>> scriptModeSettings = new ArrayList<>();
+        final List<Setting<Boolean>> scriptModeSettings = new ArrayList<>();
         for (final Class<? extends ScriptEngineService> scriptEngineService : scriptEngineRegistry.getRegisteredScriptEngineServices()) {
             if (scriptEngineService == NativeScriptEngineService.class) {
@@ -97,17 +92,17 @@ public class ScriptSettings {
             final String language = scriptEngineRegistry.getLanguage(scriptEngineService);
             for (final ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
                 // Top level, like "script.engine.groovy.inline"
-                final ScriptMode defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptModes().get(language);
-                ScriptMode defaultLangAndType = defaultNonFileScriptMode;
+                final boolean defaultNonFileScriptMode = scriptEngineRegistry.getDefaultInlineScriptEnableds().get(language);
+                boolean defaultLangAndType = defaultNonFileScriptMode;
                 // Files are treated differently because they are never default-deny
                 if (ScriptService.ScriptType.FILE == scriptType) {
-                    defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptMode();
+                    defaultLangAndType = ScriptService.ScriptType.FILE.getDefaultScriptEnabled();
                 }
-                final ScriptMode defaultIfNothingSet = defaultLangAndType;
+                final boolean defaultIfNothingSet = defaultLangAndType;
                 // Setting for something like "script.engine.groovy.inline"
-                final Setting<ScriptMode> langAndTypeSetting = new Setting<>(ScriptModes.getGlobalKey(language, scriptType),
-                    defaultLangAndType.toString(), ScriptMode::parse, Property.NodeScope);
+                final Setting<Boolean> langAndTypeSetting = Setting.boolSetting(ScriptModes.getGlobalKey(language, scriptType),
+                    defaultLangAndType, Property.NodeScope);
                 scriptModeSettings.add(langAndTypeSetting);
                 for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
@@ -115,32 +110,31 @@ public class ScriptSettings {
                     // A function that, given a setting, will return what the default should be. Since the fine-grained script settings
                     // read from a bunch of different places this is implemented in this way.
                     Function<Settings, String> defaultSettingFn = settings -> {
-                        final Setting<ScriptMode> globalOpSetting = scriptContextSettingMap.get(scriptContext);
-                        final Setting<ScriptMode> globalTypeSetting = scriptTypeSettingMap.get(scriptType);
-                        final Setting<ScriptMode> langAndTypeAndContextSetting = new Setting<>(langAndTypeAndContextName,
-                            defaultIfNothingSet.toString(), ScriptMode::parse, Property.NodeScope);
+                        final Setting<Boolean> globalOpSetting = scriptContextSettingMap.get(scriptContext);
+                        final Setting<Boolean> globalTypeSetting = scriptTypeSettingMap.get(scriptType);
+                        final Setting<Boolean> langAndTypeAndContextSetting = Setting.boolSetting(langAndTypeAndContextName,
+                            defaultIfNothingSet, Property.NodeScope);
                         // fallback logic for script mode settings
                         if (langAndTypeAndContextSetting.exists(settings)) {
                             // like: "script.engine.groovy.inline.aggs: true"
-                            return langAndTypeAndContextSetting.get(settings).getMode();
+                            return langAndTypeAndContextSetting.get(settings).toString();
                         } else if (langAndTypeSetting.exists(settings)) {
                             // like: "script.engine.groovy.inline: true"
-                            return langAndTypeSetting.get(settings).getMode();
+                            return langAndTypeSetting.get(settings).toString();
                         } else if (globalOpSetting.exists(settings)) {
                             // like: "script.aggs: true"
-                            return globalOpSetting.get(settings).getMode();
+                            return globalOpSetting.get(settings).toString();
                         } else if (globalTypeSetting.exists(settings)) {
                             // like: "script.inline: true"
-                            return globalTypeSetting.get(settings).getMode();
+                            return globalTypeSetting.get(settings).toString();
                         } else {
                             // Nothing is set!
-                            return defaultIfNothingSet.getMode();
+                            return Boolean.toString(defaultIfNothingSet);
                         }
                     };
                     // The actual setting for finest grained script settings
-                    Setting<ScriptMode> setting = new Setting<>(langAndTypeAndContextName, defaultSettingFn,
-                        ScriptMode::parse, Property.NodeScope);
+                    Setting<Boolean> setting = Setting.boolSetting(langAndTypeAndContextName, defaultSettingFn, Property.NodeScope);
                     scriptModeSettings.add(setting);
                 }
             }
@@ -148,15 +142,15 @@ public class ScriptSettings {
         return scriptModeSettings;
     }
-    public Iterable<Setting<ScriptMode>> getScriptTypeSettings() {
+    public Iterable<Setting<Boolean>> getScriptTypeSettings() {
         return Collections.unmodifiableCollection(SCRIPT_TYPE_SETTING_MAP.values());
     }
-    public Iterable<Setting<ScriptMode>> getScriptContextSettings() {
+    public Iterable<Setting<Boolean>> getScriptContextSettings() {
         return Collections.unmodifiableCollection(scriptContextSettingMap.values());
     }
-    public Iterable<Setting<ScriptMode>> getScriptLanguageSettings() {
+    public Iterable<Setting<Boolean>> getScriptLanguageSettings() {
         return scriptLanguageSettings;
     }
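The net effect of the change above: the old three-valued ScriptMode type is gone, every level of the script-settings hierarchy is a plain boolean, and defaultSettingFn resolves them most-specific-key-first. Below is a minimal, dependency-free sketch of that fallback order; the key names come from the comments in the diff, while the resolve helper itself is hypothetical.

import java.util.HashMap;
import java.util.Map;

public class ScriptSettingFallbackSketch {
    // Mirrors defaultSettingFn above: the most specific key that is present wins.
    static boolean resolve(Map<String, String> settings,
                           String langTypeContextKey, // e.g. "script.engine.groovy.inline.aggs"
                           String langTypeKey,        // e.g. "script.engine.groovy.inline"
                           String contextKey,         // e.g. "script.aggs"
                           String typeKey,            // e.g. "script.inline"
                           boolean defaultIfNothingSet) {
        for (String key : new String[] { langTypeContextKey, langTypeKey, contextKey, typeKey }) {
            if (settings.containsKey(key)) {
                return Boolean.parseBoolean(settings.get(key));
            }
        }
        return defaultIfNothingSet; // nothing is set
    }

    public static void main(String[] args) {
        Map<String, String> settings = new HashMap<>();
        settings.put("script.inline", "false");
        settings.put("script.engine.groovy.inline.aggs", "true");
        // The fine-grained key overrides the coarser "script.inline":
        System.out.println(resolve(settings,
                "script.engine.groovy.inline.aggs", "script.engine.groovy.inline",
                "script.aggs", "script.inline", false)); // prints: true
    }
}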
View File
@@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lucene.search.function.ScoreFunction;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ParseFieldRegistry;
 import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase;
@@ -97,51 +98,51 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
 import org.elasticsearch.search.action.SearchTransportService;
 import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.aggregations.Aggregator;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorParsers;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.children.InternalChildren;
-import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser;
 import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
-import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser;
 import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
 import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing;
-import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.missing.MissingParser;
 import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
 import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested;
-import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser;
 import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange;
-import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange;
 import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser;
-import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser;
 import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser;
 import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
@@ -155,50 +156,50 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
 import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsParser;
 import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms;
-import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgParser;
 import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
-import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser;
 import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser;
 import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds;
-import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser;
 import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid;
 import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxParser;
 import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
-import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.MinParser;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
 import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
 import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
 import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric;
-import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
-import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.stats.StatsParser;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
 import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
-import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
 import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
-import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
-import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
 import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -290,6 +291,8 @@ public class SearchModule extends AbstractModule {
     private final Settings settings;
     private final NamedWriteableRegistry namedWriteableRegistry;
+    public static final Setting<Integer> INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count",
+            1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);
 
     // pkg private so tests can mock
     Class<? extends SearchService> searchServiceImpl = SearchService.class;
@@ -421,10 +424,10 @@ public class SearchModule extends AbstractModule {
      * @param aggregationName names by which the aggregation may be parsed. The first name is special because it is the name that the reader
      *        is registered under.
      */
-    public <AB extends AggregatorBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
+    public <AB extends AggregationBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
             ParseField aggregationName) {
         aggregationParserRegistry.register(aggregationParser, aggregationName);
-        namedWriteableRegistry.register(AggregatorBuilder.class, aggregationName.getPreferredName(), reader);
+        namedWriteableRegistry.register(AggregationBuilder.class, aggregationName.getPreferredName(), reader);
     }
 
     /**
@@ -478,55 +481,57 @@ public class SearchModule extends AbstractModule {
     }
 
     protected void configureAggs() {
-        registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(MaxAggregatorBuilder::new, new MaxParser(), MaxAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(StatsAggregatorBuilder::new, new StatsParser(), StatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(ExtendedStatsAggregatorBuilder::new, new ExtendedStatsParser(),
-                ExtendedStatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(ValueCountAggregatorBuilder::new, new ValueCountParser(), ValueCountAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(PercentilesAggregatorBuilder::new, new PercentilesParser(),
-                PercentilesAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(PercentileRanksAggregatorBuilder::new, new PercentileRanksParser(),
-                PercentileRanksAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(CardinalityAggregatorBuilder::new, new CardinalityParser(),
-                CardinalityAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(GlobalAggregatorBuilder::new, GlobalAggregatorBuilder::parse, GlobalAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(MissingAggregatorBuilder::new, new MissingParser(), MissingAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(FilterAggregatorBuilder::new, FilterAggregatorBuilder::parse, FilterAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(FiltersAggregatorBuilder::new, FiltersAggregatorBuilder::parse,
-                FiltersAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(SamplerAggregatorBuilder::new, SamplerAggregatorBuilder::parse,
-                SamplerAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(DiversifiedAggregatorBuilder::new, new DiversifiedSamplerParser(),
-                DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(SignificantTermsAggregatorBuilder::new,
+        registerAggregation(AvgAggregationBuilder::new, new AvgParser(), AvgAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(SumAggregationBuilder::new, new SumParser(), SumAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(MinAggregationBuilder::new, new MinParser(), MinAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(MaxAggregationBuilder::new, new MaxParser(), MaxAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(StatsAggregationBuilder::new, new StatsParser(), StatsAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(ExtendedStatsAggregationBuilder::new, new ExtendedStatsParser(),
+                ExtendedStatsAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(ValueCountAggregationBuilder::new, new ValueCountParser(), ValueCountAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(PercentilesAggregationBuilder::new, new PercentilesParser(),
+                PercentilesAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(PercentileRanksAggregationBuilder::new, new PercentileRanksParser(),
+                PercentileRanksAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(CardinalityAggregationBuilder::new, new CardinalityParser(),
+                CardinalityAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse,
+                GlobalAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(MissingAggregationBuilder::new, new MissingParser(), MissingAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(FilterAggregationBuilder::new, FilterAggregationBuilder::parse,
+                FilterAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse,
+                FiltersAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse,
+                SamplerAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(DiversifiedAggregationBuilder::new, new DiversifiedSamplerParser(),
+                DiversifiedAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(TermsAggregationBuilder::new, new TermsParser(), TermsAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(SignificantTermsAggregationBuilder::new,
                 new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry),
-                SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(),
-                DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(GeoDistanceAggregatorBuilder::new, new GeoDistanceParser(),
-                GeoDistanceAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(GeoGridAggregatorBuilder::new, new GeoHashGridParser(), GeoGridAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(NestedAggregatorBuilder::new, NestedAggregatorBuilder::parse, NestedAggregatorBuilder.AGGREGATION_FIELD_NAME);
-        registerAggregation(ReverseNestedAggregatorBuilder::new, ReverseNestedAggregatorBuilder::parse,
-                ReverseNestedAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(TopHitsAggregatorBuilder::new, TopHitsAggregatorBuilder::parse,
-                TopHitsAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(GeoBoundsAggregatorBuilder::new, new GeoBoundsParser(), GeoBoundsAggregatorBuilder.AGGREGATION_NAME_FIED);
-        registerAggregation(GeoCentroidAggregatorBuilder::new, new GeoCentroidParser(),
-                GeoCentroidAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(ScriptedMetricAggregatorBuilder::new, ScriptedMetricAggregatorBuilder::parse,
-                ScriptedMetricAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(ChildrenAggregatorBuilder::new, ChildrenAggregatorBuilder::parse,
-                ChildrenAggregatorBuilder.AGGREGATION_NAME_FIELD);
+                SignificantTermsAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(RangeAggregationBuilder::new, new RangeParser(), RangeAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(DateRangeAggregationBuilder::new, new DateRangeParser(), DateRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(IpRangeAggregationBuilder::new, new IpRangeParser(), IpRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(HistogramAggregationBuilder::new, new HistogramParser(), HistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(DateHistogramAggregationBuilder::new, new DateHistogramParser(),
+                DateHistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(GeoDistanceAggregationBuilder::new, new GeoDistanceParser(),
+                GeoDistanceAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(GeoGridAggregationBuilder::new, new GeoHashGridParser(), GeoGridAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(NestedAggregationBuilder::new, NestedAggregationBuilder::parse,
+                NestedAggregationBuilder.AGGREGATION_FIELD_NAME);
+        registerAggregation(ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse,
+                ReverseNestedAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse,
+                TopHitsAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(GeoBoundsAggregationBuilder::new, new GeoBoundsParser(), GeoBoundsAggregationBuilder.AGGREGATION_NAME_FIED);
+        registerAggregation(GeoCentroidAggregationBuilder::new, new GeoCentroidParser(),
+                GeoCentroidAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse,
+                ScriptedMetricAggregationBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse,
+                ChildrenAggregationBuilder.AGGREGATION_NAME_FIELD);
         registerPipelineAggregation(DerivativePipelineAggregatorBuilder::new, DerivativePipelineAggregatorBuilder::parse,
                 DerivativePipelineAggregatorBuilder.AGGREGATION_NAME_FIELD);
         registerPipelineAggregation(MaxBucketPipelineAggregatorBuilder::new, MaxBucketPipelineAggregatorBuilder.PARSER,
@@ -650,8 +655,7 @@ public class SearchModule extends AbstractModule {
         registerQuery(MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent, MatchAllQueryBuilder.QUERY_NAME_FIELD);
         registerQuery(QueryStringQueryBuilder::new, QueryStringQueryBuilder::fromXContent, QueryStringQueryBuilder.QUERY_NAME_FIELD);
         registerQuery(BoostingQueryBuilder::new, BoostingQueryBuilder::fromXContent, BoostingQueryBuilder.QUERY_NAME_FIELD);
-        BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count",
-                settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount())));
+        BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings));
         registerQuery(BoolQueryBuilder::new, BoolQueryBuilder::fromXContent, BoolQueryBuilder.QUERY_NAME_FIELD);
         registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD);
         registerQuery(TermsQueryBuilder::new, TermsQueryBuilder::fromXContent, TermsQueryBuilder.QUERY_NAME_FIELD);
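Besides the builder renames, this file replaces the ad-hoc settings.getAsInt(...) lookup (which also consulted a stale "index."-prefixed key) with a registered, bounds-checked node setting. A short sketch of reading it, assuming the Setting and Settings classes from this commit are on the classpath; the override value is hypothetical.

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class MaxClauseCountSketch {
    public static void main(String[] args) {
        // Same definition as INDICES_MAX_CLAUSE_COUNT_SETTING above: default 1024, minimum 1.
        Setting<Integer> maxClauseCount = Setting.intSetting(
                "indices.query.bool.max_clause_count", 1024, 1, Integer.MAX_VALUE,
                Setting.Property.NodeScope);

        Settings settings = Settings.builder()
                .put("indices.query.bool.max_clause_count", 4096) // hypothetical override
                .build();

        System.out.println(maxClauseCount.get(settings)); // prints: 4096
        // A configured value below the minimum of 1 now fails validation in get(),
        // where the old getAsInt(...) call would have accepted it silently.
    }
}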
View File
@@ -36,7 +36,9 @@ import java.util.Objects;
 /**
  * A factory that knows how to create an {@link Aggregator} of a specific type.
  */
-public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extends ToXContentToBytes implements NamedWriteable, ToXContent {
+public abstract class AggregationBuilder<AB extends AggregationBuilder<AB>>
+    extends ToXContentToBytes
+    implements NamedWriteable, ToXContent {
     protected String name;
     protected Type type;
@@ -44,12 +46,12 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
     protected Map<String, Object> metaData;
 
     /**
-     * Constructs a new aggregator factory.
+     * Constructs a new aggregation builder.
      *
      * @param name The aggregation name
     * @param type The aggregation type
      */
-    public AggregatorBuilder(String name, Type type) {
+    public AggregationBuilder(String name, Type type) {
         if (name == null) {
             throw new IllegalArgumentException("[name] must not be null: [" + name + "]");
         }
@@ -63,7 +65,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
     /**
      * Read from a stream.
      */
-    protected AggregatorBuilder(StreamInput in, Type type) throws IOException {
+    protected AggregationBuilder(StreamInput in, Type type) throws IOException {
         name = in.readString();
         this.type = type;
         factoriesBuilder = new AggregatorFactories.Builder(in);
@@ -84,7 +86,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
      * Add a sub aggregation to this aggregation.
      */
     @SuppressWarnings("unchecked")
-    public AB subAggregation(AggregatorBuilder<?> aggregation) {
+    public AB subAggregation(AggregationBuilder<?> aggregation) {
         if (aggregation == null) {
             throw new IllegalArgumentException("[aggregation] must not be null: [" + name + "]");
         }
@@ -178,7 +180,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
         if (getClass() != obj.getClass())
             return false;
         @SuppressWarnings("unchecked")
-        AggregatorBuilder<AB> other = (AggregatorBuilder<AB>) obj;
+        AggregationBuilder<AB> other = (AggregationBuilder<AB>) obj;
         if (!Objects.equals(name, other.name))
             return false;
         if (!Objects.equals(type, other.type))
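Only the class name changes here; the self-referential generic parameter and the builder contract (sub-aggregations, stream serialization, equality) are untouched. A sketch of the renamed base type in use, assuming this commit's API; the aggregation and field names are made up.

import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;

public class SubAggregationSketch {
    // AB extends AggregationBuilder<AB> survives the rename, so subAggregation(...)
    // still returns the concrete builder type and chaining keeps working.
    static <AB extends AggregationBuilder<AB>> AB withAvgPrice(AB parent) {
        return parent.subAggregation(AggregationBuilders.avg("avg_price").field("price"));
    }

    public static void main(String[] args) {
        // A terms bucket per tag, each bucket carrying an average price metric.
        AggregationBuilder<?> byTag = withAvgPrice(AggregationBuilders.terms("by_tag").field("tag"));
    }
}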
View File
@@ -22,65 +22,65 @@ import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.bucket.children.Children;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
-import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filters.Filters;
 import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
-import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.missing.Missing;
-import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.nested.Nested;
-import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
-import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.Range;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.avg.Avg;
-import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
-import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid;
-import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.Max;
-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.Min;
-import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric;
-import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.stats.Stats;
-import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.sum.Sum;
-import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
-import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
-import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
+import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
 
 /**
  * Utility class to create aggregations.
@@ -93,234 +93,234 @@ public class AggregationBuilders {
     /**
      * Create a new {@link ValueCount} aggregation with the given name.
      */
-    public static ValueCountAggregatorBuilder count(String name) {
-        return new ValueCountAggregatorBuilder(name, null);
+    public static ValueCountAggregationBuilder count(String name) {
+        return new ValueCountAggregationBuilder(name, null);
     }
 
     /**
      * Create a new {@link Avg} aggregation with the given name.
      */
-    public static AvgAggregatorBuilder avg(String name) {
-        return new AvgAggregatorBuilder(name);
+    public static AvgAggregationBuilder avg(String name) {
+        return new AvgAggregationBuilder(name);
     }
 
     /**
      * Create a new {@link Max} aggregation with the given name.
      */
-    public static MaxAggregatorBuilder max(String name) {
-        return new MaxAggregatorBuilder(name);
+    public static MaxAggregationBuilder max(String name) {
+        return new MaxAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Min} aggregation with the given name.
      */
-    public static MinAggregatorBuilder min(String name) {
-        return new MinAggregatorBuilder(name);
+    public static MinAggregationBuilder min(String name) {
+        return new MinAggregationBuilder(name);
    }
 
     /**
     * Create a new {@link Sum} aggregation with the given name.
      */
-    public static SumAggregatorBuilder sum(String name) {
-        return new SumAggregatorBuilder(name);
+    public static SumAggregationBuilder sum(String name) {
+        return new SumAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Stats} aggregation with the given name.
      */
-    public static StatsAggregatorBuilder stats(String name) {
-        return new StatsAggregatorBuilder(name);
+    public static StatsAggregationBuilder stats(String name) {
+        return new StatsAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link ExtendedStats} aggregation with the given name.
      */
-    public static ExtendedStatsAggregatorBuilder extendedStats(String name) {
-        return new ExtendedStatsAggregatorBuilder(name);
+    public static ExtendedStatsAggregationBuilder extendedStats(String name) {
+        return new ExtendedStatsAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Filter} aggregation with the given name.
      */
-    public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) {
-        return new FilterAggregatorBuilder(name, filter);
+    public static FilterAggregationBuilder filter(String name, QueryBuilder filter) {
+        return new FilterAggregationBuilder(name, filter);
     }
 
     /**
     * Create a new {@link Filters} aggregation with the given name.
      */
-    public static FiltersAggregatorBuilder filters(String name, KeyedFilter... filters) {
-        return new FiltersAggregatorBuilder(name, filters);
+    public static FiltersAggregationBuilder filters(String name, KeyedFilter... filters) {
+        return new FiltersAggregationBuilder(name, filters);
     }
 
     /**
     * Create a new {@link Filters} aggregation with the given name.
      */
-    public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) {
-        return new FiltersAggregatorBuilder(name, filters);
+    public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) {
+        return new FiltersAggregationBuilder(name, filters);
     }
 
     /**
     * Create a new {@link Sampler} aggregation with the given name.
      */
-    public static SamplerAggregatorBuilder sampler(String name) {
-        return new SamplerAggregatorBuilder(name);
+    public static SamplerAggregationBuilder sampler(String name) {
+        return new SamplerAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Sampler} aggregation with the given name.
      */
-    public static DiversifiedAggregatorBuilder diversifiedSampler(String name) {
-        return new DiversifiedAggregatorBuilder(name);
+    public static DiversifiedAggregationBuilder diversifiedSampler(String name) {
+        return new DiversifiedAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Global} aggregation with the given name.
      */
-    public static GlobalAggregatorBuilder global(String name) {
-        return new GlobalAggregatorBuilder(name);
+    public static GlobalAggregationBuilder global(String name) {
+        return new GlobalAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Missing} aggregation with the given name.
      */
-    public static MissingAggregatorBuilder missing(String name) {
-        return new MissingAggregatorBuilder(name, null);
+    public static MissingAggregationBuilder missing(String name) {
+        return new MissingAggregationBuilder(name, null);
     }
 
     /**
     * Create a new {@link Nested} aggregation with the given name.
      */
-    public static NestedAggregatorBuilder nested(String name, String path) {
-        return new NestedAggregatorBuilder(name, path);
+    public static NestedAggregationBuilder nested(String name, String path) {
+        return new NestedAggregationBuilder(name, path);
     }
 
     /**
     * Create a new {@link ReverseNested} aggregation with the given name.
      */
-    public static ReverseNestedAggregatorBuilder reverseNested(String name) {
-        return new ReverseNestedAggregatorBuilder(name);
+    public static ReverseNestedAggregationBuilder reverseNested(String name) {
+        return new ReverseNestedAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Children} aggregation with the given name.
      */
-    public static ChildrenAggregatorBuilder children(String name, String childType) {
-        return new ChildrenAggregatorBuilder(name, childType);
+    public static ChildrenAggregationBuilder children(String name, String childType) {
+        return new ChildrenAggregationBuilder(name, childType);
     }
 
     /**
     * Create a new {@link GeoDistance} aggregation with the given name.
      */
-    public static GeoDistanceAggregatorBuilder geoDistance(String name, GeoPoint origin) {
-        return new GeoDistanceAggregatorBuilder(name, origin);
+    public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) {
+        return new GeoDistanceAggregationBuilder(name, origin);
     }
 
     /**
     * Create a new {@link Histogram} aggregation with the given name.
      */
-    public static HistogramAggregatorBuilder histogram(String name) {
-        return new HistogramAggregatorBuilder(name);
+    public static HistogramAggregationBuilder histogram(String name) {
+        return new HistogramAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link GeoHashGrid} aggregation with the given name.
      */
-    public static GeoGridAggregatorBuilder geohashGrid(String name) {
-        return new GeoGridAggregatorBuilder(name);
+    public static GeoGridAggregationBuilder geohashGrid(String name) {
+        return new GeoGridAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link SignificantTerms} aggregation with the given name.
      */
-    public static SignificantTermsAggregatorBuilder significantTerms(String name) {
-        return new SignificantTermsAggregatorBuilder(name, null);
+    public static SignificantTermsAggregationBuilder significantTerms(String name) {
+        return new SignificantTermsAggregationBuilder(name, null);
    }
 
     /**
-     * Create a new {@link DateHistogramAggregatorBuilder} aggregation with the given
+     * Create a new {@link DateHistogramAggregationBuilder} aggregation with the given
      * name.
      */
-    public static DateHistogramAggregatorBuilder dateHistogram(String name) {
-        return new DateHistogramAggregatorBuilder(name);
+    public static DateHistogramAggregationBuilder dateHistogram(String name) {
+        return new DateHistogramAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Range} aggregation with the given name.
      */
-    public static RangeAggregatorBuilder range(String name) {
-        return new RangeAggregatorBuilder(name);
+    public static RangeAggregationBuilder range(String name) {
+        return new RangeAggregationBuilder(name);
     }
 
     /**
-     * Create a new {@link DateRangeAggregatorBuilder} aggregation with the
+     * Create a new {@link DateRangeAggregationBuilder} aggregation with the
      * given name.
      */
-    public static DateRangeAggregatorBuilder dateRange(String name) {
-        return new DateRangeAggregatorBuilder(name);
+    public static DateRangeAggregationBuilder dateRange(String name) {
+        return new DateRangeAggregationBuilder(name);
     }
 
     /**
-     * Create a new {@link IpRangeAggregatorBuilder} aggregation with the
+     * Create a new {@link IpRangeAggregationBuilder} aggregation with the
     * given name.
      */
-    public static IpRangeAggregatorBuilder ipRange(String name) {
-        return new IpRangeAggregatorBuilder(name);
+    public static IpRangeAggregationBuilder ipRange(String name) {
+        return new IpRangeAggregationBuilder(name);
     }
 
     /**
     * Create a new {@link Terms} aggregation with the given name.
      */
-    public static TermsAggregatorBuilder terms(String name) {
-        return new TermsAggregatorBuilder(name, null);
+    public static TermsAggregationBuilder terms(String name) {
+        return new TermsAggregationBuilder(name, null);
    }
 
     /**
     * Create a new {@link Percentiles} aggregation with the given name.
*/ */
public static PercentilesAggregatorBuilder percentiles(String name) { public static PercentilesAggregationBuilder percentiles(String name) {
return new PercentilesAggregatorBuilder(name); return new PercentilesAggregationBuilder(name);
} }
/** /**
* Create a new {@link PercentileRanks} aggregation with the given name. * Create a new {@link PercentileRanks} aggregation with the given name.
*/ */
public static PercentileRanksAggregatorBuilder percentileRanks(String name) { public static PercentileRanksAggregationBuilder percentileRanks(String name) {
return new PercentileRanksAggregatorBuilder(name); return new PercentileRanksAggregationBuilder(name);
} }
/** /**
* Create a new {@link Cardinality} aggregation with the given name. * Create a new {@link Cardinality} aggregation with the given name.
*/ */
public static CardinalityAggregatorBuilder cardinality(String name) { public static CardinalityAggregationBuilder cardinality(String name) {
return new CardinalityAggregatorBuilder(name, null); return new CardinalityAggregationBuilder(name, null);
} }
/** /**
* Create a new {@link TopHits} aggregation with the given name. * Create a new {@link TopHits} aggregation with the given name.
*/ */
public static TopHitsAggregatorBuilder topHits(String name) { public static TopHitsAggregationBuilder topHits(String name) {
return new TopHitsAggregatorBuilder(name); return new TopHitsAggregationBuilder(name);
} }
/** /**
* Create a new {@link GeoBounds} aggregation with the given name. * Create a new {@link GeoBounds} aggregation with the given name.
*/ */
public static GeoBoundsAggregatorBuilder geoBounds(String name) { public static GeoBoundsAggregationBuilder geoBounds(String name) {
return new GeoBoundsAggregatorBuilder(name); return new GeoBoundsAggregationBuilder(name);
} }
/** /**
* Create a new {@link GeoCentroid} aggregation with the given name. * Create a new {@link GeoCentroid} aggregation with the given name.
*/ */
public static GeoCentroidAggregatorBuilder geoCentroid(String name) { public static GeoCentroidAggregationBuilder geoCentroid(String name) {
return new GeoCentroidAggregatorBuilder(name); return new GeoCentroidAggregationBuilder(name);
} }
/** /**
* Create a new {@link ScriptedMetric} aggregation with the given name. * Create a new {@link ScriptedMetric} aggregation with the given name.
*/ */
public static ScriptedMetricAggregatorBuilder scriptedMetric(String name) { public static ScriptedMetricAggregationBuilder scriptedMetric(String name) {
return new ScriptedMetricAggregatorBuilder(name); return new ScriptedMetricAggregationBuilder(name);
} }
} }
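For orientation, a minimal sketch of how the renamed factory methods compose after this change; the `SearchSourceBuilder` wiring and the `genre`/`release_date` fields are illustrative assumptions, not part of this diff:
--------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Only the class names changed in this rename; the fluent API is untouched.
SearchSourceBuilder source = new SearchSourceBuilder()
        .aggregation(AggregationBuilders.terms("genres")        // TermsAggregationBuilder
                .field("genre")                                 // assumed keyword-like field
                .subAggregation(AggregationBuilders.dateHistogram("per_month")
                        .field("release_date")                  // assumed date field
                        .dateHistogramInterval(DateHistogramInterval.MONTH)));
--------------------------------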
@@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
 /**
  * Parses the aggregation request and creates the appropriate aggregator factory for it.
  *
- * @see AggregatorBuilder
+ * @see AggregationBuilder
  */
 @FunctionalInterface
 public interface Parser {
@@ -55,7 +55,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
  * @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped
  * @throws java.io.IOException When parsing fails
  */
-AggregatorBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
+AggregationBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
 }
 /**
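Because `Parser` is a `@FunctionalInterface` whose single method matches the static `parse(String, QueryParseContext)` methods on the renamed builders, a parser can be supplied as a plain method reference; a minimal sketch (the variable name is illustrative):
--------------------------------
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;

// The static parse methods below line up with Parser one-to-one, so no
// anonymous class is needed when supplying an aggregation's parser.
Aggregator.Parser filterParser = FilterAggregationBuilder::parse;
--------------------------------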
@@ -124,7 +124,7 @@ public class AggregatorFactories {
 public static class Builder extends ToXContentToBytes implements Writeable {
 private final Set<String> names = new HashSet<>();
-private final List<AggregatorBuilder<?>> aggregatorBuilders = new ArrayList<>();
+private final List<AggregationBuilder<?>> aggregationBuilders = new ArrayList<>();
 private final List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders = new ArrayList<>();
 private boolean skipResolveOrder;
@@ -140,7 +140,7 @@
 public Builder(StreamInput in) throws IOException {
 int factoriesSize = in.readVInt();
 for (int i = 0; i < factoriesSize; i++) {
-addAggregator(in.readNamedWriteable(AggregatorBuilder.class));
+addAggregator(in.readNamedWriteable(AggregationBuilder.class));
 }
 int pipelineFactoriesSize = in.readVInt();
 for (int i = 0; i < pipelineFactoriesSize; i++) {
@@ -150,8 +150,8 @@
 @Override
 public void writeTo(StreamOutput out) throws IOException {
-out.writeVInt(this.aggregatorBuilders.size());
-for (AggregatorBuilder<?> factory : aggregatorBuilders) {
+out.writeVInt(this.aggregationBuilders.size());
+for (AggregationBuilder<?> factory : aggregationBuilders) {
 out.writeNamedWriteable(factory);
 }
 out.writeVInt(this.pipelineAggregatorBuilders.size());
@@ -164,11 +164,11 @@
 throw new UnsupportedOperationException("This needs to be removed");
 }
-public Builder addAggregator(AggregatorBuilder<?> factory) {
+public Builder addAggregator(AggregationBuilder<?> factory) {
 if (!names.add(factory.name)) {
 throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
 }
-aggregatorBuilders.add(factory);
+aggregationBuilders.add(factory);
 return this;
 }
@@ -186,30 +186,30 @@
 }
 public AggregatorFactories build(AggregationContext context, AggregatorFactory<?> parent) throws IOException {
-if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
+if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
 return EMPTY;
 }
 List<PipelineAggregatorBuilder<?>> orderedpipelineAggregators = null;
 if (skipResolveOrder) {
 orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders);
 } else {
-orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders);
+orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders);
 }
-AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregatorBuilders.size()];
-for (int i = 0; i < aggregatorBuilders.size(); i++) {
-aggFactories[i] = aggregatorBuilders.get(i).build(context, parent);
+AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregationBuilders.size()];
+for (int i = 0; i < aggregationBuilders.size(); i++) {
+aggFactories[i] = aggregationBuilders.get(i).build(context, parent);
 }
 return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators);
 }
 private List<PipelineAggregatorBuilder<?>> resolvePipelineAggregatorOrder(
-List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregatorBuilder<?>> aggBuilders) {
+List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregationBuilder<?>> aggBuilders) {
 Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap = new HashMap<>();
 for (PipelineAggregatorBuilder<?> builder : pipelineAggregatorBuilders) {
 pipelineAggregatorBuildersMap.put(builder.getName(), builder);
 }
-Map<String, AggregatorBuilder<?>> aggBuildersMap = new HashMap<>();
-for (AggregatorBuilder<?> aggBuilder : aggBuilders) {
+Map<String, AggregationBuilder<?>> aggBuildersMap = new HashMap<>();
+for (AggregationBuilder<?> aggBuilder : aggBuilders) {
 aggBuildersMap.put(aggBuilder.name, aggBuilder);
 }
 List<PipelineAggregatorBuilder<?>> orderedPipelineAggregatorrs = new LinkedList<>();
@@ -223,7 +223,7 @@
 return orderedPipelineAggregatorrs;
 }
-private void resolvePipelineAggregatorOrder(Map<String, AggregatorBuilder<?>> aggBuildersMap,
+private void resolvePipelineAggregatorOrder(Map<String, AggregationBuilder<?>> aggBuildersMap,
 Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap,
 List<PipelineAggregatorBuilder<?>> orderedPipelineAggregators, List<PipelineAggregatorBuilder<?>> unmarkedBuilders,
 Set<PipelineAggregatorBuilder<?>> temporarilyMarked, PipelineAggregatorBuilder<?> builder) {
@@ -238,7 +238,7 @@
 if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) {
 continue;
 } else if (aggBuildersMap.containsKey(firstAggName)) {
-AggregatorBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
+AggregationBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
 for (int i = 1; i < bucketsPathElements.size(); i++) {
 PathElement pathElement = bucketsPathElements.get(i);
 String aggName = pathElement.name;
@@ -247,9 +247,9 @@
 } else {
 // Check the non-pipeline sub-aggregator
 // factories
-AggregatorBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
+AggregationBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
 boolean foundSubBuilder = false;
-for (AggregatorBuilder<?> subBuilder : subBuilders) {
+for (AggregationBuilder<?> subBuilder : subBuilders) {
 if (aggName.equals(subBuilder.name)) {
 aggBuilder = subBuilder;
 foundSubBuilder = true;
@@ -289,8 +289,8 @@
 }
 }
-AggregatorBuilder<?>[] getAggregatorFactories() {
-return this.aggregatorBuilders.toArray(new AggregatorBuilder<?>[this.aggregatorBuilders.size()]);
+AggregationBuilder<?>[] getAggregatorFactories() {
+return this.aggregationBuilders.toArray(new AggregationBuilder<?>[this.aggregationBuilders.size()]);
 }
 List<PipelineAggregatorBuilder<?>> getPipelineAggregatorFactories() {
@@ -298,14 +298,14 @@
 }
 public int count() {
-return aggregatorBuilders.size() + pipelineAggregatorBuilders.size();
+return aggregationBuilders.size() + pipelineAggregatorBuilders.size();
 }
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
-if (aggregatorBuilders != null) {
-for (AggregatorBuilder<?> subAgg : aggregatorBuilders) {
+if (aggregationBuilders != null) {
+for (AggregationBuilder<?> subAgg : aggregationBuilders) {
 subAgg.toXContent(builder, params);
 }
 }
@@ -320,7 +320,7 @@
 @Override
 public int hashCode() {
-return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders);
+return Objects.hash(aggregationBuilders, pipelineAggregatorBuilders);
 }
 @Override
@@ -330,7 +330,7 @@
 if (getClass() != obj.getClass())
 return false;
 Builder other = (Builder) obj;
-if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders))
+if (!Objects.equals(aggregationBuilders, other.aggregationBuilders))
 return false;
 if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders))
 return false;
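The private `resolvePipelineAggregatorOrder` overload above is a depth-first topological sort over `buckets_path` references, using a temporarily-marked set to detect reference cycles. A standalone sketch of the same idea over plain strings (the names and shape are illustrative, not the class's actual helpers):
--------------------------------
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

class PipelineOrderSketch {
    // Emits each node only after everything it depends on; revisiting a
    // node that is still temporarily marked means a dependency cycle.
    static void visit(String node, Map<String, List<String>> deps,
            Set<String> tempMarked, List<String> ordered) {
        if (ordered.contains(node)) {
            return;                                   // already emitted
        }
        if (!tempMarked.add(node)) {
            throw new IllegalStateException("cyclical dependency on: " + node);
        }
        for (String dep : deps.getOrDefault(node, Collections.emptyList())) {
            visit(dep, deps, tempMarked, ordered);
        }
        tempMarked.remove(node);
        ordered.add(node);                            // dependencies first
    }
}
--------------------------------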
@@ -104,7 +104,7 @@ public class AggregatorParsers {
 + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
 }
-AggregatorBuilder<?> aggFactory = null;
+AggregationBuilder<?> aggFactory = null;
 PipelineAggregatorBuilder<?> pipelineAggregatorFactory = null;
 AggregatorFactories.Builder subFactories = null;
@@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.FieldContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -44,7 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;
-public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<ParentChild, ChildrenAggregatorBuilder> {
+public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<ParentChild, ChildrenAggregationBuilder> {
 public static final String NAME = InternalChildren.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
@@ -59,7 +59,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
  * @param childType
  * the type of children documents
  */
-public ChildrenAggregatorBuilder(String name, String childType) {
+public ChildrenAggregationBuilder(String name, String childType) {
 super(name, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
 if (childType == null) {
 throw new IllegalArgumentException("[childType] must not be null: [" + name + "]");
@@ -70,7 +70,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
 /**
  * Read from a stream.
  */
-public ChildrenAggregatorBuilder(StreamInput in) throws IOException {
+public ChildrenAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
 childType = in.readString();
 }
@@ -121,7 +121,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
 return builder;
 }
-public static ChildrenAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+public static ChildrenAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
 String childType = null;
 XContentParser.Token token;
@@ -148,7 +148,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
 }
-return new ChildrenAggregatorBuilder(aggregationName, childType);
+return new ChildrenAggregationBuilder(aggregationName, childType);
 }
 @Override
@@ -158,7 +158,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
 @Override
 protected boolean innerEquals(Object obj) {
-ChildrenAggregatorBuilder other = (ChildrenAggregatorBuilder) obj;
+ChildrenAggregationBuilder other = (ChildrenAggregationBuilder) obj;
 return Objects.equals(childType, other.childType);
 }
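A usage sketch of the renamed builder; the `answer` child type is an illustrative mapping assumption:
--------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;

// Buckets child documents of the (assumed) "answer" type under each parent;
// a null childType would throw IllegalArgumentException, per the constructor.
ChildrenAggregationBuilder toAnswers = AggregationBuilders.children("to-answers", "answer");
--------------------------------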
@@ -24,12 +24,11 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.EmptyQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -37,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;
-public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorBuilder> {
+public class FilterAggregationBuilder extends AggregationBuilder<FilterAggregationBuilder> {
 public static final String NAME = InternalFilter.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
@@ -51,7 +50,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
  * filter will fall into the bucket defined by this
  * {@link Filter} aggregation.
  */
-public FilterAggregatorBuilder(String name, QueryBuilder filter) {
+public FilterAggregationBuilder(String name, QueryBuilder filter) {
 super(name, InternalFilter.TYPE);
 if (filter == null) {
 throw new IllegalArgumentException("[filter] must not be null: [" + name + "]");
@@ -66,7 +65,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
 /**
  * Read from a stream.
  */
-public FilterAggregatorBuilder(StreamInput in) throws IOException {
+public FilterAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalFilter.TYPE);
 filter = in.readNamedWriteable(QueryBuilder.class);
 }
@@ -90,7 +89,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
 return builder;
 }
-public static FilterAggregatorBuilder parse(String aggregationName, QueryParseContext context)
+public static FilterAggregationBuilder parse(String aggregationName, QueryParseContext context)
 throws IOException {
 QueryBuilder filter = context.parseInnerQueryBuilder();
@@ -98,7 +97,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
 throw new ParsingException(null, "filter cannot be null in filter aggregation [{}]", aggregationName);
 }
-return new FilterAggregatorBuilder(aggregationName, filter);
+return new FilterAggregationBuilder(aggregationName, filter);
 }
@@ -109,7 +108,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
 @Override
 protected boolean doEquals(Object obj) {
-FilterAggregatorBuilder other = (FilterAggregatorBuilder) obj;
+FilterAggregationBuilder other = (FilterAggregationBuilder) obj;
 return Objects.equals(filter, other.filter);
 }
@@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
@@ -43,7 +43,7 @@ import java.util.Objects;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregatorBuilder> {
+public class FiltersAggregationBuilder extends AggregationBuilder<FiltersAggregationBuilder> {
 public static final String NAME = InternalFilters.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
@@ -62,11 +62,11 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
  * @param filters
  * the KeyedFilters to use with this aggregation.
  */
-public FiltersAggregatorBuilder(String name, KeyedFilter... filters) {
+public FiltersAggregationBuilder(String name, KeyedFilter... filters) {
 this(name, Arrays.asList(filters));
 }
-private FiltersAggregatorBuilder(String name, List<KeyedFilter> filters) {
+private FiltersAggregationBuilder(String name, List<KeyedFilter> filters) {
 super(name, InternalFilters.TYPE);
 // internally we want to have a fixed order of filters, regardless of the order of the filters in the request
 this.filters = new ArrayList<>(filters);
@@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
  * @param filters
  * the filters to use with this aggregation
  */
-public FiltersAggregatorBuilder(String name, QueryBuilder... filters) {
+public FiltersAggregationBuilder(String name, QueryBuilder... filters) {
 super(name, InternalFilters.TYPE);
 List<KeyedFilter> keyedFilters = new ArrayList<>(filters.length);
 for (int i = 0; i < filters.length; i++) {
@@ -93,7 +93,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
 /**
  * Read from a stream.
  */
-public FiltersAggregatorBuilder(StreamInput in) throws IOException {
+public FiltersAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalFilters.TYPE);
 keyed = in.readBoolean();
 int filtersSize = in.readVInt();
@@ -131,7 +131,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
 /**
  * Set whether to include a bucket for documents not matching any filter
  */
-public FiltersAggregatorBuilder otherBucket(boolean otherBucket) {
+public FiltersAggregationBuilder otherBucket(boolean otherBucket) {
 this.otherBucket = otherBucket;
 return this;
 }
@@ -154,7 +154,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
  * Set the key to use for the bucket for documents not matching any
  * filter.
  */
-public FiltersAggregatorBuilder otherBucketKey(String otherBucketKey) {
+public FiltersAggregationBuilder otherBucketKey(String otherBucketKey) {
 if (otherBucketKey == null) {
 throw new IllegalArgumentException("[otherBucketKey] must not be null: [" + name + "]");
 }
@@ -199,7 +199,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
 return builder;
 }
-public static FiltersAggregatorBuilder parse(String aggregationName, QueryParseContext context)
+public static FiltersAggregationBuilder parse(String aggregationName, QueryParseContext context)
 throws IOException {
 XContentParser parser = context.parser();
@@ -264,12 +264,12 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
 otherBucketKey = "_other_";
 }
-FiltersAggregatorBuilder factory;
+FiltersAggregationBuilder factory;
 if (keyedFilters != null) {
-factory = new FiltersAggregatorBuilder(aggregationName,
+factory = new FiltersAggregationBuilder(aggregationName,
 keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()]));
 } else {
-factory = new FiltersAggregatorBuilder(aggregationName,
+factory = new FiltersAggregationBuilder(aggregationName,
 nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()]));
 }
 if (otherBucket != null) {
@@ -288,7 +288,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
 @Override
 protected boolean doEquals(Object obj) {
-FiltersAggregatorBuilder other = (FiltersAggregatorBuilder) obj;
+FiltersAggregationBuilder other = (FiltersAggregationBuilder) obj;
 return Objects.equals(filters, other.filters)
 && Objects.equals(keyed, other.keyed)
 && Objects.equals(otherBucket, other.otherBucket)
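A sketch of the renamed filters builder together with the other-bucket options defined above; the queries and bucket keys are illustrative, and the two-argument `KeyedFilter` constructor is inferred from its use elsewhere in this class:
--------------------------------
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;

// One named bucket per filter, plus a catch-all bucket (otherBucket /
// otherBucketKey, as defined above) for documents matching neither filter.
FiltersAggregationBuilder messages = AggregationBuilders
        .filters("messages",
                new KeyedFilter("errors", QueryBuilders.termQuery("body", "error")),
                new KeyedFilter("warnings", QueryBuilders.termQuery("body", "warning")))
        .otherBucket(true)
        .otherBucketKey("other_messages");
--------------------------------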
@@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -45,7 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;
-public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoGridAggregatorBuilder> {
+public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoGridAggregationBuilder> {
 public static final String NAME = InternalGeoHashGrid.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
@@ -53,14 +53,14 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
 private int requiredSize = GeoHashGridParser.DEFAULT_MAX_NUM_CELLS;
 private int shardSize = -1;
-public GeoGridAggregatorBuilder(String name) {
+public GeoGridAggregationBuilder(String name) {
 super(name, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
 }
 /**
  * Read from a stream.
  */
-public GeoGridAggregatorBuilder(StreamInput in) throws IOException {
+public GeoGridAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
 precision = in.readVInt();
 requiredSize = in.readVInt();
@@ -74,7 +74,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
 out.writeVInt(shardSize);
 }
-public GeoGridAggregatorBuilder precision(int precision) {
+public GeoGridAggregationBuilder precision(int precision) {
 this.precision = GeoHashGridParams.checkPrecision(precision);
 return this;
 }
@@ -83,7 +83,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
 return precision;
 }
-public GeoGridAggregatorBuilder size(int size) {
+public GeoGridAggregationBuilder size(int size) {
 if (size < -1) {
 throw new IllegalArgumentException(
 "[size] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -96,7 +96,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
 return requiredSize;
 }
-public GeoGridAggregatorBuilder shardSize(int shardSize) {
+public GeoGridAggregationBuilder shardSize(int shardSize) {
 if (shardSize < -1) {
 throw new IllegalArgumentException(
 "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -145,7 +145,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
 @Override
 protected boolean innerEquals(Object obj) {
-GeoGridAggregatorBuilder other = (GeoGridAggregatorBuilder) obj;
+GeoGridAggregationBuilder other = (GeoGridAggregationBuilder) obj;
 if (precision != other.precision) {
 return false;
 }
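A usage sketch of the renamed geohash-grid builder; the `location` field is an illustrative geo_point field, and `field(...)` comes from the `ValuesSourceAggregationBuilder` base class:
--------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;

// precision is validated through GeoHashGridParams.checkPrecision, and
// size/shardSize reject values below -1, as in the setters above.
GeoGridAggregationBuilder grid = AggregationBuilders.geohashGrid("grid")
        .field("location")
        .precision(5)
        .size(1000);
--------------------------------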
@@ -46,10 +46,10 @@ public class GeoHashGridAggregator extends BucketsAggregator {
 private final int requiredSize;
 private final int shardSize;
-private final GeoGridAggregatorBuilder.CellIdSource valuesSource;
+private final GeoGridAggregationBuilder.CellIdSource valuesSource;
 private final LongHash bucketOrds;
-public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregatorBuilder.CellIdSource valuesSource,
+public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregationBuilder.CellIdSource valuesSource,
 int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
 Map<String, Object> metaData) throws IOException {
 super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
@@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder.CellIdSource;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
@@ -45,10 +45,10 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser {
 }
 @Override
-protected GeoGridAggregatorBuilder createFactory(
+protected GeoGridAggregationBuilder createFactory(
 String aggregationName, ValuesSourceType valuesSourceType,
 ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(aggregationName);
+GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName);
 Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION);
 if (precision != null) {
 factory.precision(precision);
@@ -24,25 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
-public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorBuilder> {
+public class GlobalAggregationBuilder extends AggregationBuilder<GlobalAggregationBuilder> {
 public static final String NAME = InternalGlobal.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
-public GlobalAggregatorBuilder(String name) {
+public GlobalAggregationBuilder(String name) {
 super(name, InternalGlobal.TYPE);
 }
 /**
  * Read from a stream.
  */
-public GlobalAggregatorBuilder(StreamInput in) throws IOException {
+public GlobalAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalGlobal.TYPE);
 }
@@ -64,9 +64,9 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorB
 return builder;
 }
-public static GlobalAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+public static GlobalAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
 context.parser().nextToken();
-return new GlobalAggregatorBuilder(aggregationName);
+return new GlobalAggregationBuilder(aggregationName);
 }
 @Override
@@ -24,14 +24,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;
 public abstract class AbstractHistogramBuilder<AB extends AbstractHistogramBuilder<AB>>
-extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AB> {
+extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AB> {
 protected long interval;
 protected long offset = 0;
@@ -33,21 +33,21 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import java.io.IOException;
 import java.util.Objects;
-public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<DateHistogramAggregatorBuilder> {
+public class DateHistogramAggregationBuilder extends AbstractHistogramBuilder<DateHistogramAggregationBuilder> {
 public static final String NAME = InternalDateHistogram.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 private DateHistogramInterval dateHistogramInterval;
-public DateHistogramAggregatorBuilder(String name) {
+public DateHistogramAggregationBuilder(String name) {
 super(name, InternalDateHistogram.HISTOGRAM_FACTORY);
 }
 /**
  * Read from a stream.
  */
-public DateHistogramAggregatorBuilder(StreamInput in) throws IOException {
+public DateHistogramAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalDateHistogram.HISTOGRAM_FACTORY);
 dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
 }
@@ -61,7 +61,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
 /**
  * Set the interval.
  */
-public DateHistogramAggregatorBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
+public DateHistogramAggregationBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
 if (dateHistogramInterval == null) {
 throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [" + name + "]");
 }
@@ -69,7 +69,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
 return this;
 }
-public DateHistogramAggregatorBuilder offset(String offset) {
+public DateHistogramAggregationBuilder offset(String offset) {
 if (offset == null) {
 throw new IllegalArgumentException("[offset] must not be null: [" + name + "]");
 }
@@ -79,12 +79,12 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
 protected static long parseStringOffset(String offset) {
 if (offset.charAt(0) == '-') {
 return -TimeValue
-.parseTimeValue(offset.substring(1), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+.parseTimeValue(offset.substring(1), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
 .millis();
 }
 int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
 return TimeValue
-.parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+.parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
 .millis();
 }
@@ -121,7 +121,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
 @Override
 protected boolean innerEquals(Object obj) {
-DateHistogramAggregatorBuilder other = (DateHistogramAggregatorBuilder) obj;
+DateHistogramAggregationBuilder other = (DateHistogramAggregationBuilder) obj;
 return super.innerEquals(obj) && Objects.equals(dateHistogramInterval, other.dateHistogramInterval);
 }
 }
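To make the offset handling above concrete: a leading '-' negates the parsed milliseconds, an optional leading '+' is skipped, and the remainder is parsed as a time value. A usage sketch (the `timestamp` field is illustrative):
--------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

// Daily buckets shifted by six hours; per parseStringOffset, "+6h" and
// "6h" parse to the same positive offset and "-6h" to its negation.
AggregationBuilders.dateHistogram("per_day")
        .field("timestamp")
        .dateHistogramInterval(DateHistogramInterval.DAY)
        .offset("+6h");
--------------------------------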
@@ -42,9 +42,9 @@ public class DateHistogramParser extends HistogramParser {
 }
 @Override
-protected DateHistogramAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
 ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(aggregationName);
+DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName);
 Object interval = otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
 if (interval == null) {
 throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
@@ -89,6 +89,6 @@
 @Override
 protected long parseStringOffset(String offset) throws IOException {
-return DateHistogramAggregatorBuilder.parseStringOffset(offset);
+return DateHistogramAggregationBuilder.parseStringOffset(offset);
 }
 }
@@ -29,18 +29,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import java.io.IOException;
-public class HistogramAggregatorBuilder extends AbstractHistogramBuilder<HistogramAggregatorBuilder> {
+public class HistogramAggregationBuilder extends AbstractHistogramBuilder<HistogramAggregationBuilder> {
 public static final String NAME = InternalHistogram.TYPE.name();
 public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
-public HistogramAggregatorBuilder(String name) {
+public HistogramAggregationBuilder(String name) {
 super(name, InternalHistogram.HISTOGRAM_FACTORY);
 }
 /**
  * Read from a stream.
  */
-public HistogramAggregatorBuilder(StreamInput in) throws IOException {
+public HistogramAggregationBuilder(StreamInput in) throws IOException {
 super(in, InternalHistogram.HISTOGRAM_FACTORY);
 }
@@ -47,7 +47,7 @@ public class HistogramParser extends NumericValuesSourceParser {
 @Override
 protected AbstractHistogramBuilder<?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
 ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder(aggregationName);
+HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName);
 Long interval = (Long) otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
 if (interval == null) {
 throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
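For completeness, the numeric counterpart; the `price` field is illustrative and the `interval(long)` setter is assumed from the `interval` field on `AbstractHistogramBuilder` above:
--------------------------------
import org.elasticsearch.search.aggregations.AggregationBuilders;

// 50-unit-wide buckets over an (assumed) numeric "price" field; [interval]
// is required, mirroring the parser check above.
AggregationBuilders.histogram("prices")
        .field("price")
        .interval(50);
--------------------------------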
Some files were not shown because too many files have changed in this diff.