Merge branch 'master' into enhancement/use_shard_bulk_for_single_ops
commit e93fdb8460
@@ -430,15 +430,33 @@ cd $BATS_ARCHIVES
 sudo -E bats $BATS_TESTS/*.bats
 -------------------------------------------------
 
-Note: Starting vagrant VM outside of the elasticsearch folder requires to
-indicates the folder that contains the Vagrantfile using the VAGRANT_CWD
-environment variable:
+You can also use Gradle to prepare the test environment and then start a single VM:
 
 -------------------------------------------------
-gradle vagrantSetUp
-VAGRANT_CWD=/path/to/elasticsearch vagrant up centos-7 --provider virtualbox
+gradle vagrantFedora24#up
 -------------------------------------------------
 
+Or any of vagrantCentos6#up, vagrantDebian8#up, vagrantFedora24#up, vagrantOel6#up,
+vagrantOel7#up, vagrantOpensuse13#up, vagrantSles12#up, vagrantUbuntu1204#up,
+vagrantUbuntu1604#up.
+
+Once up, you can then connect to the VM using SSH from the elasticsearch directory:
+
+-------------------------------------------------
+vagrant ssh fedora-24
+-------------------------------------------------
+
+Or from another directory:
+
+-------------------------------------------------
+VAGRANT_CWD=/path/to/elasticsearch vagrant ssh fedora-24
+-------------------------------------------------
+
+Note: Starting a vagrant VM outside of the elasticsearch folder requires you to
+indicate the folder that contains the Vagrantfile using the VAGRANT_CWD
+environment variable.
 
 == Coverage analysis
 
 Tests can be run instrumented with jacoco to produce a coverage report in
@@ -39,7 +39,7 @@ public class RestTestPlugin implements Plugin<Project> {
         if (false == REQUIRED_PLUGINS.any {project.pluginManager.hasPlugin(it)}) {
             throw new InvalidUserDataException('elasticsearch.rest-test '
                 + 'requires either elasticsearch.build or '
-                + 'elasticsearch.standalone-test')
+                + 'elasticsearch.standalone-rest-test')
         }
 
         RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
@@ -40,9 +40,9 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
     @Override
     public void apply(Project project) {
         if (project.pluginManager.hasPlugin('elasticsearch.build')) {
-            throw new InvalidUserDataException('elasticsearch.standalone-test, '
-                + 'elasticsearch.standalone-test, and elasticsearch.build are '
-                + 'mutually exclusive')
+            throw new InvalidUserDataException('elasticsearch.standalone-test '
+                + 'elasticsearch.standalone-rest-test, and elasticsearch.build '
+                + 'are mutually exclusive')
         }
         project.pluginManager.apply(JavaBasePlugin)
         project.pluginManager.apply(RandomizedTestingPlugin)
@@ -404,10 +404,6 @@ class VagrantTestPlugin implements Plugin<Project> {
                 args 'halt', box
             }
             stop.dependsOn(halt)
-            if (project.extensions.esvagrant.boxes.contains(box) == false) {
-                // we only need a halt task if this box was not specified
-                continue;
-            }
 
             Task update = project.tasks.create("vagrant${boxTask}#update", VagrantCommandTask) {
                 boxName box
@@ -435,6 +431,11 @@ class VagrantTestPlugin implements Plugin<Project> {
                 dependsOn update
             }
 
+            if (project.extensions.esvagrant.boxes.contains(box) == false) {
+                // we don't need test tasks if this box was not specified
+                continue;
+            }
+
             Task smoke = project.tasks.create("vagrant${boxTask}#smoketest", Exec) {
                 environment vagrantEnvVars
                 dependsOn up
@@ -443,7 +443,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]fs[/\\]FsRepository.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]uri[/\\]URLIndexShardRepository.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]uri[/\\]URLRepository.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponse.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]RestController.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestCountAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />

@@ -468,7 +467,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]InternalFilters.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]geogrid[/\\]GeoHashGridAggregator.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]global[/\\]GlobalAggregator.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]global[/\\]InternalGlobal.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]histogram[/\\]HistogramAggregator.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]InternalMissing.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]MissingAggregator.java" checks="LineLength" />
@@ -24,9 +24,12 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
 import org.elasticsearch.action.support.WriteResponse;
 import org.elasticsearch.action.support.replication.ReplicationResponse;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.StatusToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.IndexSettings;

@@ -39,11 +42,23 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Locale;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
 /**
  * A base class for the response of a write operation that involves a single doc
 */
 public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContentObject {
 
+    private static final String _SHARDS = "_shards";
+    private static final String _INDEX = "_index";
+    private static final String _TYPE = "_type";
+    private static final String _ID = "_id";
+    private static final String _VERSION = "_version";
+    private static final String _SEQ_NO = "_seq_no";
+    private static final String RESULT = "result";
+    private static final String FORCED_REFRESH = "forced_refresh";
+
     /**
      * An enum that represents the the results of CRUD operations, primarily used to communicate the type of
      * operation that occurred.
@@ -253,18 +268,32 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
 
     public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
         ReplicationResponse.ShardInfo shardInfo = getShardInfo();
-        builder.field("_index", shardId.getIndexName())
-                .field("_type", type)
-                .field("_id", id)
-                .field("_version", version)
-                .field("result", getResult().getLowercase());
+        builder.field(_INDEX, shardId.getIndexName())
+                .field(_TYPE, type)
+                .field(_ID, id)
+                .field(_VERSION, version)
+                .field(RESULT, getResult().getLowercase());
         if (forcedRefresh) {
-            builder.field("forced_refresh", true);
+            builder.field(FORCED_REFRESH, true);
         }
-        shardInfo.toXContent(builder, params);
+        builder.field(_SHARDS, shardInfo);
         if (getSeqNo() >= 0) {
-            builder.field("_seq_no", getSeqNo());
+            builder.field(_SEQ_NO, getSeqNo());
        }
         return builder;
     }
 
+    /**
+     * Declare the {@link ObjectParser} fields to use when parsing a {@link DocWriteResponse}
+     */
+    protected static void declareParserFields(ConstructingObjectParser<? extends DocWriteResponse, Void> objParser) {
+        objParser.declareString(constructorArg(), new ParseField(_INDEX));
+        objParser.declareString(constructorArg(), new ParseField(_TYPE));
+        objParser.declareString(constructorArg(), new ParseField(_ID));
+        objParser.declareLong(constructorArg(), new ParseField(_VERSION));
+        objParser.declareString(constructorArg(), new ParseField(RESULT));
+        objParser.declareLong(optionalConstructorArg(), new ParseField(_SEQ_NO));
+        objParser.declareBoolean(DocWriteResponse::setForcedRefresh, new ParseField(FORCED_REFRESH));
+        objParser.declareObject(DocWriteResponse::setShardInfo, (p, c) -> ShardInfo.fromXContent(p), new ParseField(_SHARDS));
+    }
 }
@@ -20,13 +20,21 @@
 package org.elasticsearch.action.index;
 
 import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.seqno.SequenceNumbersService;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;
 
 import java.io.IOException;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
 /**
  * A response of an index operation,
  *

@@ -35,6 +43,8 @@ import java.io.IOException;
 */
 public class IndexResponse extends DocWriteResponse {
 
+    private static final String CREATED = "created";
+
     public IndexResponse() {
     }
@@ -64,7 +74,34 @@ public class IndexResponse extends DocWriteResponse {
     @Override
     public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
         super.innerToXContent(builder, params);
-        builder.field("created", result == Result.CREATED);
+        builder.field(CREATED, result == Result.CREATED);
         return builder;
     }
 
+    /**
+     * ConstructingObjectParser used to parse the {@link IndexResponse}. We use an ObjectParser here
+     * because most fields are parsed by the parent abstract class {@link DocWriteResponse} and it's
+     * not easy to parse part of the fields in the parent class and other fields in the child class
+     * using the usual streamed parsing method.
+     */
+    private static final ConstructingObjectParser<IndexResponse, Void> PARSER;
+    static {
+        PARSER = new ConstructingObjectParser<>(IndexResponse.class.getName(),
+                args -> {
+                    // index uuid and shard id are unknown and can't be parsed back for now.
+                    ShardId shardId = new ShardId(new Index((String) args[0], IndexMetaData.INDEX_UUID_NA_VALUE), -1);
+                    String type = (String) args[1];
+                    String id = (String) args[2];
+                    long version = (long) args[3];
+                    long seqNo = (args[5] != null) ? (long) args[5] : SequenceNumbersService.UNASSIGNED_SEQ_NO;
+                    boolean created = (boolean) args[6];
+                    return new IndexResponse(shardId, type, id, seqNo, version, created);
+                });
+        DocWriteResponse.declareParserFields(PARSER);
+        PARSER.declareBoolean(constructorArg(), new ParseField(CREATED));
+    }
+
+    public static IndexResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.apply(parser, null);
+    }
 }
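
For reference, here is a hedged sketch of how this parser could be exercised: the JSON literal mirrors the field names declared in DocWriteResponse.declareParserFields and the CREATED field above, but the literal itself and the createParser call are illustrative only (the exact createParser signature varies across Elasticsearch versions):

    // Hedged sketch, not part of the change: feed a body shaped like innerToXContent's output
    // back through fromXContent. The JSON and the parser construction are illustrative.
    String json = "{\"_index\":\"twitter\",\"_type\":\"tweet\",\"_id\":\"1\",\"_version\":1,"
            + "\"result\":\"created\",\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0},"
            + "\"_seq_no\":0,\"created\":true}";
    try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json)) {
        IndexResponse response = IndexResponse.fromXContent(parser);
        assert response.getResult() == DocWriteResponse.Result.CREATED;
    }
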
@@ -292,7 +292,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
     private void raiseEarlyFailure(Exception e) {
         for (AtomicArray.Entry<FirstResult> entry : firstResults.asList()) {
             try {
-                DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.shardTarget().nodeId());
+                DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.shardTarget().getNodeId());
                 sendReleaseSearchContext(entry.value.id(), node);
             } catch (Exception inner) {
                 inner.addSuppressed(e);

@@ -317,7 +317,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
             if (queryResult.hasHits()
                 && docIdsToLoad.get(entry.index) == null) { // but none of them made it to the global top docs
                 try {
-                    DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.queryResult().shardTarget().nodeId());
+                    DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.queryResult().shardTarget().getNodeId());
                     sendReleaseSearchContext(entry.value.queryResult().id(), node);
                 } catch (Exception e) {
                     logger.trace("failed to release context", e);

@@ -75,7 +75,7 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
 
         for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
             DfsSearchResult dfsResult = entry.value;
-            DiscoveryNode node = nodeIdToDiscoveryNode.apply(dfsResult.shardTarget().nodeId());
+            DiscoveryNode node = nodeIdToDiscoveryNode.apply(dfsResult.shardTarget().getNodeId());
             QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
             executeSecondPhase(entry.index, dfsResult, counter, node, querySearchRequest);
         }

@@ -84,7 +84,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
         final AtomicInteger counter = new AtomicInteger(firstResults.asList().size());
         for (final AtomicArray.Entry<DfsSearchResult> entry : firstResults.asList()) {
             DfsSearchResult dfsResult = entry.value;
-            DiscoveryNode node = nodeIdToDiscoveryNode.apply(dfsResult.shardTarget().nodeId());
+            DiscoveryNode node = nodeIdToDiscoveryNode.apply(dfsResult.shardTarget().getNodeId());
             QuerySearchRequest querySearchRequest = new QuerySearchRequest(request, dfsResult.id(), dfs);
             executeQuery(entry.index, dfsResult, counter, querySearchRequest, node);
         }

@@ -155,7 +155,7 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
         final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
         for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
             QuerySearchResult queryResult = queryResults.get(entry.index);
-            DiscoveryNode node = nodeIdToDiscoveryNode.apply(queryResult.shardTarget().nodeId());
+            DiscoveryNode node = nodeIdToDiscoveryNode.apply(queryResult.shardTarget().getNodeId());
             ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult, entry, lastEmittedDocPerShard);
             executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
         }
@@ -71,9 +71,9 @@ import java.util.stream.StreamSupport;
 public class SearchPhaseController extends AbstractComponent {
 
     private static final Comparator<AtomicArray.Entry<? extends QuerySearchResultProvider>> QUERY_RESULT_ORDERING = (o1, o2) -> {
-        int i = o1.value.shardTarget().index().compareTo(o2.value.shardTarget().index());
+        int i = o1.value.shardTarget().getIndex().compareTo(o2.value.shardTarget().getIndex());
         if (i == 0) {
-            i = o1.value.shardTarget().shardId().id() - o2.value.shardTarget().shardId().id();
+            i = o1.value.shardTarget().getShardId().id() - o2.value.shardTarget().getShardId().id();
         }
         return i;
     };

@@ -90,7 +90,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
         final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
         for (AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
             QuerySearchResultProvider queryResult = firstResults.get(entry.index);
-            DiscoveryNode node = nodeIdToDiscoveryNode.apply(queryResult.shardTarget().nodeId());
+            DiscoveryNode node = nodeIdToDiscoveryNode.apply(queryResult.shardTarget().getNodeId());
             ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult(), entry, lastEmittedDocPerShard);
             executeFetch(entry.index, queryResult.shardTarget(), counter, fetchSearchRequest, node);
         }

@@ -185,7 +185,7 @@ class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction {
             final QuerySearchResult querySearchResult = queryResults.get(entry.index);
             ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index];
             ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.id(), docIds, lastEmittedDoc);
-            DiscoveryNode node = nodes.get(querySearchResult.shardTarget().nodeId());
+            DiscoveryNode node = nodes.get(querySearchResult.shardTarget().getNodeId());
             searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task, new ActionListener<FetchSearchResult>() {
                 @Override
                 public void onResponse(FetchSearchResult result) {

@@ -93,7 +93,7 @@ public class ShardSearchFailure implements ShardOperationFailedException {
     @Override
     public String index() {
         if (shardTarget != null) {
-            return shardTarget.index();
+            return shardTarget.getIndex();
         }
         return null;
     }

@@ -104,7 +104,7 @@ public class ShardSearchFailure implements ShardOperationFailedException {
     @Override
     public int shardId() {
         if (shardTarget != null) {
-            return shardTarget.shardId().id();
+            return shardTarget.getShardId().id();
         }
         return -1;
     }

@@ -156,7 +156,7 @@ public class ShardSearchFailure implements ShardOperationFailedException {
         builder.field("shard", shardId());
         builder.field("index", index());
         if (shardTarget != null) {
-            builder.field("node", shardTarget.nodeId());
+            builder.field("node", shardTarget.getNodeId());
         }
         if (cause != null) {
             builder.field("reason");
@@ -21,11 +21,9 @@ package org.elasticsearch.action.search;
 
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.RAMOutputStream;
-import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.ShardSearchTransportRequest;
 
 import java.io.IOException;
 import java.util.Base64;

@@ -53,7 +51,7 @@ final class TransportSearchHelper {
         for (AtomicArray.Entry<? extends SearchPhaseResult> entry : searchPhaseResults.asList()) {
             SearchPhaseResult searchPhaseResult = entry.value;
             out.writeLong(searchPhaseResult.id());
-            out.writeString(searchPhaseResult.shardTarget().nodeId());
+            out.writeString(searchPhaseResult.shardTarget().getNodeId());
         }
         byte[] bytes = new byte[(int) out.getFilePointer()];
         out.writeTo(bytes, 0);
@@ -73,7 +73,7 @@ public class ReplicationResponse extends ActionResponse {
         this.shardInfo = shardInfo;
     }
 
-    public static class ShardInfo implements Streamable, ToXContent {
+    public static class ShardInfo implements Streamable, ToXContentObject {
 
         private static final String _SHARDS = "_shards";
         private static final String TOTAL = "total";

@@ -179,7 +179,7 @@ public class ReplicationResponse extends ActionResponse {
 
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject(_SHARDS);
+            builder.startObject();
             builder.field(TOTAL, total);
             builder.field(SUCCESSFUL, successful);
             builder.field(FAILED, getFailed());

@@ -195,18 +195,12 @@ public class ReplicationResponse extends ActionResponse {
         }
 
         public static ShardInfo fromXContent(XContentParser parser) throws IOException {
-            XContentParser.Token token = parser.nextToken();
-            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
-
-            String currentFieldName = parser.currentName();
-            if (_SHARDS.equals(currentFieldName) == false) {
-                throwUnknownField(currentFieldName, parser.getTokenLocation());
-            }
-            token = parser.nextToken();
+            XContentParser.Token token = parser.currentToken();
             ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
 
             int total = 0, successful = 0;
             List<Failure> failuresList = null;
+            String currentFieldName = null;
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     currentFieldName = parser.currentName();
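
The switch from nextToken() to currentToken() changes the calling convention: the caller is now expected to have positioned the parser on the START_OBJECT of the shards value (which is what the ObjectParser-based declaration in DocWriteResponse does), instead of ShardInfo consuming the enclosing "_shards" field name itself. A hedged sketch of a manual call site under that assumption (the surrounding field handling is illustrative, not taken from this commit):

    // Hedged sketch: position the parser on START_OBJECT before delegating.
    if (token == XContentParser.Token.START_OBJECT && "_shards".equals(currentFieldName)) {
        shardInfo = ReplicationResponse.ShardInfo.fromXContent(parser); // parser.currentToken() is START_OBJECT here
    }
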
@@ -28,12 +28,14 @@ import org.elasticsearch.action.GenericAction;
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.support.AbstractClient;
+import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskListener;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.Map;
+import java.util.function.Supplier;
 
 /**
  * Client that executes actions on the local node.

@@ -41,13 +43,19 @@ import java.util.Map;
 public class NodeClient extends AbstractClient {
 
     private Map<GenericAction, TransportAction> actions;
+    /**
+     * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by
+     * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}.
+     */
+    private Supplier<String> localNodeId;
 
     public NodeClient(Settings settings, ThreadPool threadPool) {
         super(settings, threadPool);
     }
 
-    public void initialize(Map<GenericAction, TransportAction> actions) {
+    public void initialize(Map<GenericAction, TransportAction> actions, Supplier<String> localNodeId) {
         this.actions = actions;
+        this.localNodeId = localNodeId;
     }
 
     @Override

@@ -85,6 +93,14 @@ public class NodeClient extends AbstractClient {
         return transportAction(action).execute(request, listener);
     }
 
+    /**
+     * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by
+     * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}.
+     */
+    public String getLocalNodeId() {
+        return localNodeId.get();
+    }
+
    /**
     * Get the {@link TransportAction} for an {@link Action}, throwing exceptions if the action isn't available.
    */
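
As a rough illustration of the new wiring (not taken from this commit's call sites; actionMap, clusterService, and task are assumed names available in the caller), node bootstrap can now hand the client a lazy supplier of the local node id and later qualify task ids with it:

    // Hedged sketch: supply the local node id lazily, since it is not known at construction time.
    client.initialize(actionMap, () -> clusterService.localNode().getId());

    // Later, a locally executed task can be addressed as "<nodeId>:<taskId>".
    String fullTaskId = client.getLocalNodeId() + ":" + task.getId();
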
@@ -36,7 +36,9 @@ import org.elasticsearch.discovery.zen.NodesFaultDetection;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ScheduledFuture;
 

@@ -76,20 +78,26 @@ public class NodeConnectionsService extends AbstractLifecycleComponent {
         this.reconnectInterval = NodeConnectionsService.CLUSTER_NODE_RECONNECT_INTERVAL_SETTING.get(settings);
     }
 
-    public void connectToNodes(List<DiscoveryNode> addedNodes) {
+    public void connectToNodes(Iterable<DiscoveryNode> discoveryNodes) {
 
         // TODO: do this in parallel (and wait)
-        for (final DiscoveryNode node : addedNodes) {
+        for (final DiscoveryNode node : discoveryNodes) {
             try (Releasable ignored = nodeLocks.acquire(node)) {
-                Integer current = nodes.put(node, 0);
-                assert current == null : "node " + node + " was added in event but already in internal nodes";
+                nodes.putIfAbsent(node, 0);
                 validateNodeConnected(node);
             }
         }
     }
 
-    public void disconnectFromNodes(List<DiscoveryNode> removedNodes) {
-        for (final DiscoveryNode node : removedNodes) {
+    /**
+     * Disconnects from all nodes except the ones provided as parameter
+     */
+    public void disconnectFromNodesExcept(Iterable<DiscoveryNode> nodesToKeep) {
+        Set<DiscoveryNode> currentNodes = new HashSet<>(nodes.keySet());
+        for (DiscoveryNode node : nodesToKeep) {
+            currentNodes.remove(node);
+        }
+        for (final DiscoveryNode node : currentNodes) {
             try (Releasable ignored = nodeLocks.acquire(node)) {
                 Integer current = nodes.remove(node);
                 assert current != null : "node " + node + " was removed in event but not in internal nodes";

@@ -772,7 +772,7 @@ public class ClusterService extends AbstractLifecycleComponent {
             taskOutputs.createAckListener(threadPool, newClusterState) :
             null;
 
-        nodeConnectionsService.connectToNodes(clusterChangedEvent.nodesDelta().addedNodes());
+        nodeConnectionsService.connectToNodes(newClusterState.nodes());
 
         // if we are the master, publish the new state to all nodes
        // we publish here before we send a notification to all the listeners, since if it fails

@@ -788,7 +788,8 @@ public class ClusterService extends AbstractLifecycleComponent {
                     "failing [{}]: failed to commit cluster state version [{}]", taskInputs.summary, version),
                 t);
             // ensure that list of connected nodes in NodeConnectionsService is in-sync with the nodes of the current cluster state
-            nodeConnectionsService.disconnectFromNodes(clusterChangedEvent.nodesDelta().addedNodes());
+            nodeConnectionsService.connectToNodes(previousClusterState.nodes());
+            nodeConnectionsService.disconnectFromNodesExcept(previousClusterState.nodes());
             taskOutputs.publishingFailed(t);
             return;
         }

@@ -808,7 +809,7 @@ public class ClusterService extends AbstractLifecycleComponent {
         logger.debug("set local cluster state to version {}", newClusterState.version());
         callClusterStateAppliers(newClusterState, clusterChangedEvent);
 
-        nodeConnectionsService.disconnectFromNodes(clusterChangedEvent.nodesDelta().removedNodes());
+        nodeConnectionsService.disconnectFromNodesExcept(newClusterState.nodes());
 
         updateState(css -> newClusterState);
 
@@ -38,8 +38,8 @@ public final class ESLoggerFactory {
     public static final Setting<Level> LOG_DEFAULT_LEVEL_SETTING =
         new Setting<>("logger.level", Level.INFO.name(), Level::valueOf, Property.NodeScope);
     public static final Setting<Level> LOG_LEVEL_SETTING =
-        Setting.prefixKeySetting("logger.", Level.INFO.name(), Level::valueOf,
-            Property.Dynamic, Property.NodeScope);
+        Setting.prefixKeySetting("logger.", (key) -> new Setting<>(key, Level.INFO.name(), Level::valueOf, Property.Dynamic,
+            Property.NodeScope));
 
     public static Logger getLogger(String prefix, String name) {
         return getLogger(prefix, LogManager.getLogger(name));
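
With the factory-based prefixKeySetting, concrete per-logger settings are created on demand from the prefix setting. A hedged sketch of pulling one (the logger name and the settings instance are illustrative):

    // Hedged sketch: obtain a concrete, dynamically updatable level setting for one logger.
    Setting<Level> actionLevel =
            ESLoggerFactory.LOG_LEVEL_SETTING.getConcreteSetting("logger.org.elasticsearch.action");
    Level level = actionLevel.get(settings); // `settings` is an assumed Settings instance
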
@@ -195,6 +195,19 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
         addSettingsUpdater(setting.newUpdater(consumer, logger, validator));
     }
 
+    /**
+     * Adds a settings consumer for affix settings. Affix settings have a namespace associated to it that needs to be available to the
+     * consumer in order to be processed correctly.
+     */
+    public synchronized <T> void addAffixUpdateConsumer(Setting.AffixSetting<T> setting, BiConsumer<String, T> consumer,
+                                                        BiConsumer<String, T> validator) {
+        final Setting<?> registeredSetting = this.complexMatchers.get(setting.getKey());
+        if (setting != registeredSetting) {
+            throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]");
+        }
+        addSettingsUpdater(setting.newAffixUpdater(consumer, logger, validator));
+    }
+
     synchronized void addSettingsUpdater(SettingUpdater<?> updater) {
         this.settingUpdaters.add(updater);
     }
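
A hedged usage sketch of the new consumer registration (the setting name, the boolSetting default, and the consumer bodies are illustrative; ClusterSettings is the usual AbstractScopedSettings subclass such a consumer would be registered on):

    // Hedged sketch: one namespace per storage backend, and a consumer told which namespace changed.
    static final Setting.AffixSetting<Boolean> STORAGE_ENABLED =
            Setting.affixKeySetting("storage.", "enable",
                    key -> Setting.boolSetting(key, false, Setting.Property.Dynamic, Setting.Property.NodeScope));

    void register(ClusterSettings clusterSettings) {
        clusterSettings.addAffixUpdateConsumer(STORAGE_ENABLED,
                (namespace, enabled) -> logger.info("storage backend [{}] enabled: {}", namespace, enabled),
                (namespace, enabled) -> { /* validation hook, runs before the update is applied */ });
    }
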
@@ -42,14 +42,17 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
+import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Function;
+import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * A setting. Encapsulates typical stuff like default value, parsing, and scope.
@@ -410,8 +413,8 @@ public class Setting<T> extends ToXContentToBytes {
     }
 
     /**
-     * Updates settings that depend on eachother. See {@link AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)}
-     * and its usage for details.
+     * Updates settings that depend on each other.
+     * See {@link AbstractScopedSettings#addSettingsUpdateConsumer(Setting, Setting, BiConsumer)} and its usage for details.
     */
     static <A, B> AbstractScopedSettings.SettingUpdater<Tuple<A, B>> compoundUpdater(final BiConsumer<A, B> consumer,
                                                                                      final Setting<A> aSetting, final Setting<B> bSetting, Logger logger) {
@@ -446,6 +449,76 @@ public class Setting<T> extends ToXContentToBytes {
         };
     }
 
+    public static class AffixSetting<T> extends Setting<T> {
+        private final AffixKey key;
+        private final Function<String, Setting<T>> delegateFactory;
+
+        public AffixSetting(AffixKey key, Setting<T> delegate, Function<String, Setting<T>> delegateFactory) {
+            super(key, delegate.defaultValue, delegate.parser, delegate.properties.toArray(new Property[0]));
+            this.key = key;
+            this.delegateFactory = delegateFactory;
+        }
+
+        boolean isGroupSetting() {
+            return true;
+        }
+
+        private Stream<String> matchStream(Settings settings) {
+            return settings.getAsMap().keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey));
+        }
+
+        AbstractScopedSettings.SettingUpdater<Map<AbstractScopedSettings.SettingUpdater<T>, T>> newAffixUpdater(
+                BiConsumer<String, T> consumer, Logger logger, BiConsumer<String, T> validator) {
+            return new AbstractScopedSettings.SettingUpdater<Map<AbstractScopedSettings.SettingUpdater<T>, T>>() {
+
+                @Override
+                public boolean hasChanged(Settings current, Settings previous) {
+                    return Stream.concat(matchStream(current), matchStream(previous)).findAny().isPresent();
+                }
+
+                @Override
+                public Map<AbstractScopedSettings.SettingUpdater<T>, T> getValue(Settings current, Settings previous) {
+                    // we collect all concrete keys and then delegate to the actual setting for validation and settings extraction
+                    final Map<AbstractScopedSettings.SettingUpdater<T>, T> result = new IdentityHashMap<>();
+                    Stream.concat(matchStream(current), matchStream(previous)).forEach(aKey -> {
+                        String namespace = key.getNamespace(aKey);
+                        AbstractScopedSettings.SettingUpdater<T> updater =
+                            getConcreteSetting(aKey).newUpdater((v) -> consumer.accept(namespace, v), logger,
+                                (v) -> validator.accept(namespace, v));
+                        if (updater.hasChanged(current, previous)) {
+                            // only the ones that have changed otherwise we might get too many updates
+                            // the hasChanged above checks only if there are any changes
+                            T value = updater.getValue(current, previous);
+                            result.put(updater, value);
+                        }
+                    });
+                    return result;
+                }
+
+                @Override
+                public void apply(Map<AbstractScopedSettings.SettingUpdater<T>, T> value, Settings current, Settings previous) {
+                    for (Map.Entry<AbstractScopedSettings.SettingUpdater<T>, T> entry : value.entrySet()) {
+                        entry.getKey().apply(entry.getValue(), current, previous);
+                    }
+                }
+            };
+        }
+
+        @Override
+        public Setting<T> getConcreteSetting(String key) {
+            if (match(key)) {
+                return delegateFactory.apply(key);
+            } else {
+                throw new IllegalArgumentException("key [" + key + "] must match [" + getKey() + "] but didn't.");
+            }
+        }
+
+        @Override
+        public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
+            matchStream(defaultSettings).forEach((key) -> getConcreteSetting(key).diff(builder, source, defaultSettings));
+        }
+    }
+
+
     private final class Updater implements AbstractScopedSettings.SettingUpdater<T> {
         private final Consumer<T> consumer;
@@ -727,7 +800,6 @@ public class Setting<T> extends ToXContentToBytes {
             }
         }
-
 
     private static String arrayToParsableString(String[] array) {
         try {
             XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());

@@ -741,9 +813,11 @@ public class Setting<T> extends ToXContentToBytes {
             throw new ElasticsearchException(ex);
         }
     }
 
     public static Setting<Settings> groupSetting(String key, Property... properties) {
         return groupSetting(key, (s) -> {}, properties);
     }
 
     public static Setting<Settings> groupSetting(String key, Consumer<Settings> validator, Property... properties) {
         return new Setting<Settings>(new GroupKey(key), (s) -> "", (s) -> null, properties) {
             @Override
@@ -894,59 +968,24 @@ public class Setting<T> extends ToXContentToBytes {
      * can easily be added with this setting. Yet, prefix key settings don't support updaters out of the box unless
      * {@link #getConcreteSetting(String)} is used to pull the updater.
     */
-    public static <T> Setting<T> prefixKeySetting(String prefix, String defaultValue, Function<String, T> parser,
-                                                  Property... properties) {
-        return affixKeySetting(AffixKey.withPrefix(prefix), (s) -> defaultValue, parser, properties);
+    public static <T> AffixSetting<T> prefixKeySetting(String prefix, Function<String, Setting<T>> delegateFactory) {
+        return affixKeySetting(new AffixKey(prefix), delegateFactory);
     }
 
     /**
      * This setting type allows to validate settings that have the same type and a common prefix and suffix. For instance
-     * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, adfix key settings don't support updaters
+     * storage.${backend}.enable=[true|false] can easily be added with this setting. Yet, affix key settings don't support updaters
      * out of the box unless {@link #getConcreteSetting(String)} is used to pull the updater.
     */
-    public static <T> Setting<T> affixKeySetting(String prefix, String suffix, Function<Settings, String> defaultValue,
-                                                 Function<String, T> parser, Property... properties) {
-        return affixKeySetting(AffixKey.withAffix(prefix, suffix), defaultValue, parser, properties);
+    public static <T> AffixSetting<T> affixKeySetting(String prefix, String suffix, Function<String, Setting<T>> delegateFactory) {
+        return affixKeySetting(new AffixKey(prefix, suffix), delegateFactory);
     }
 
-    public static <T> Setting<T> affixKeySetting(String prefix, String suffix, String defaultValue, Function<String, T> parser,
-                                                 Property... properties) {
-        return affixKeySetting(prefix, suffix, (s) -> defaultValue, parser, properties);
-    }
-
-    public static <T> Setting<T> affixKeySetting(AffixKey key, Function<Settings, String> defaultValue, Function<String, T> parser,
-                                                 Property... properties) {
-        return new Setting<T>(key, defaultValue, parser, properties) {
-
-            @Override
-            boolean isGroupSetting() {
-                return true;
-            }
-
-            @Override
-            AbstractScopedSettings.SettingUpdater<T> newUpdater(Consumer<T> consumer, Logger logger, Consumer<T> validator) {
-                throw new UnsupportedOperationException("Affix settings can't be updated. Use #getConcreteSetting for updating.");
-            }
-
-            @Override
-            public Setting<T> getConcreteSetting(String key) {
-                if (match(key)) {
-                    return new Setting<>(key, defaultValue, parser, properties);
-                } else {
-                    throw new IllegalArgumentException("key [" + key + "] must match [" + getKey() + "] but didn't.");
-                }
-            }
-
-            @Override
-            public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
-                for (Map.Entry<String, String> entry : defaultSettings.getAsMap().entrySet()) {
-                    if (match(entry.getKey())) {
-                        getConcreteSetting(entry.getKey()).diff(builder, source, defaultSettings);
-                    }
-                }
-            }
-        };
-    }
+    private static <T> AffixSetting<T> affixKeySetting(AffixKey key, Function<String, Setting<T>> delegateFactory) {
+        Setting<T> delegate = delegateFactory.apply("_na_");
+        return new AffixSetting<>(key, delegate, delegateFactory);
+    };
 
 
     public interface Key {
@@ -1012,37 +1051,60 @@
         }
     }
 
+    /**
+     * A key that allows for static pre and suffix. This is used for settings
+     * that have dynamic namespaces like for different accounts etc.
+     */
     public static final class AffixKey implements Key {
-        public static AffixKey withPrefix(String prefix) {
-            return new AffixKey(prefix, null);
-        }
-
-        public static AffixKey withAffix(String prefix, String suffix) {
-            return new AffixKey(prefix, suffix);
-        }
+        private final Pattern pattern;
 
         private final String prefix;
         private final String suffix;
 
-        public AffixKey(String prefix, String suffix) {
+        AffixKey(String prefix) {
+            this(prefix, null);
+        }
+
+        AffixKey(String prefix, String suffix) {
             assert prefix != null || suffix != null: "Either prefix or suffix must be non-null";
+
             this.prefix = prefix;
             if (prefix.endsWith(".") == false) {
                 throw new IllegalArgumentException("prefix must end with a '.'");
             }
             this.suffix = suffix;
+            if (suffix == null) {
+                pattern = Pattern.compile("(" + Pattern.quote(prefix) + "((?:[-\\w]+[.])*[-\\w]+$))");
+            } else {
+                // the last part of this regexp is for lists since they are represented as x.${namespace}.y.1, x.${namespace}.y.2
+                pattern = Pattern.compile("(" + Pattern.quote(prefix) + "([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\.\\d+)?");
+            }
         }
 
         @Override
         public boolean match(String key) {
-            boolean match = true;
-            if (prefix != null) {
-                match = key.startsWith(prefix);
-            }
-            if (suffix != null) {
-                match = match && key.endsWith(suffix);
-            }
-            return match;
+            return pattern.matcher(key).matches();
+        }
+
+        /**
+         * Returns a string representation of the concrete setting key
+         */
+        String getConcreteString(String key) {
+            Matcher matcher = pattern.matcher(key);
+            if (matcher.matches() == false) {
+                throw new IllegalStateException("can't get concrete string for key " + key + " key doesn't match");
+            }
+            return matcher.group(1);
+        }
+
+        /**
+         * Returns a string representation of the concrete setting key
+         */
+        String getNamespace(String key) {
+            Matcher matcher = pattern.matcher(key);
+            if (matcher.matches() == false) {
+                throw new IllegalStateException("can't get concrete string for key " + key + " key doesn't match");
+            }
+            return matcher.group(2);
         }
 
         public SimpleKey toConcreteKey(String missingPart) {
|
|
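For reference, the suffix-variant pattern above captures the whole concrete key in group 1 and the dynamic namespace in group 2, which is what getConcreteString and getNamespace return. A standalone sketch with a hypothetical prefix/suffix pair (the setting names are made up for illustration):

-------------------------------------------------
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AffixKeyPatternDemo {
    public static void main(String[] args) {
        String prefix = "cluster.routing.allocation.awareness.";  // hypothetical prefix
        String suffix = "values";                                  // hypothetical suffix
        // Same shape as the pattern AffixKey builds when a suffix is present.
        Pattern p = Pattern.compile("(" + Pattern.quote(prefix) + "([-\\w]+)\\." + Pattern.quote(suffix) + ")(?:\\.\\d+)?");

        Matcher m = p.matcher("cluster.routing.allocation.awareness.zone.values.0");
        if (m.matches()) {
            System.out.println(m.group(1)); // concrete key: cluster.routing.allocation.awareness.zone.values
            System.out.println(m.group(2)); // namespace:    zone
        }
    }
}
-------------------------------------------------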
@@ -20,6 +20,7 @@
 package org.elasticsearch.common.xcontent;

 import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.xcontent.XContentParser.Token;

 import java.io.IOException;

@@ -56,6 +57,14 @@ public final class XContentParserUtils {
         throw new ParsingException(location, String.format(Locale.ROOT, message, field));
     }

+    /**
+     * @throws ParsingException with a "unknown token found" reason
+     */
+    public static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
+        String message = "Failed to parse object: unexpected token [%s] found";
+        throw new ParsingException(location, String.format(Locale.ROOT, message, token));
+    }
+
     /**
      * Makes sure that provided token is of the expected type
      *

@@ -67,4 +76,35 @@ public final class XContentParserUtils {
             throw new ParsingException(location.get(), String.format(Locale.ROOT, message, expected, actual));
         }
     }
+
+    /**
+     * Parse the current token depending on its token type. The following token types will be
+     * parsed by the corresponding parser methods:
+     * <ul>
+     *     <li>XContentParser.Token.VALUE_STRING: parser.text()</li>
+     *     <li>XContentParser.Token.VALUE_NUMBER: parser.numberValue()</li>
+     *     <li>XContentParser.Token.VALUE_BOOLEAN: parser.booleanValue()</li>
+     *     <li>XContentParser.Token.VALUE_EMBEDDED_OBJECT: parser.binaryValue()</li>
+     * </ul>
+     *
+     * @throws ParsingException if the token none of the allowed values
+     */
+    public static Object parseStoredFieldsValue(XContentParser parser) throws IOException {
+        XContentParser.Token token = parser.currentToken();
+        Object value = null;
+        if (token == XContentParser.Token.VALUE_STRING) {
+            //binary values will be parsed back and returned as base64 strings when reading from json and yaml
+            value = parser.text();
+        } else if (token == XContentParser.Token.VALUE_NUMBER) {
+            value = parser.numberValue();
+        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
+            value = parser.booleanValue();
+        } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
+            //binary values will be parsed back and returned as BytesArray when reading from cbor and smile
+            value = new BytesArray(parser.binaryValue());
+        } else {
+            throwUnknownToken(token, parser.getTokenLocation());
+        }
+        return value;
+    }
 }
@@ -19,8 +19,6 @@
 package org.elasticsearch.index.get;

-import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;

@@ -36,6 +34,7 @@ import java.util.List;
 import java.util.Objects;

 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue;

 public class GetField implements Streamable, ToXContent, Iterable<Object> {

@@ -119,21 +118,7 @@ public class GetField implements Streamable, ToXContent, Iterable<Object> {
         ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
         List<Object> values = new ArrayList<>();
         while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-            Object value;
-            if (token == XContentParser.Token.VALUE_STRING) {
-                //binary values will be parsed back and returned as base64 strings when reading from json and yaml
-                value = parser.text();
-            } else if (token == XContentParser.Token.VALUE_NUMBER) {
-                value = parser.numberValue();
-            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
-                value = parser.booleanValue();
-            } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
-                //binary values will be parsed back and returned as BytesArray when reading from cbor and smile
-                value = new BytesArray(parser.binaryValue());
-            } else {
-                throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: unsupported token found [" + token + "]");
-            }
-            values.add(value);
+            values.add(parseStoredFieldsValue(parser));
         }
         return new GetField(fieldName, values);
     }
@@ -27,6 +27,7 @@ import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.FuzzyQuery;
+import org.apache.lucene.search.GraphQuery;
 import org.apache.lucene.search.MultiPhraseQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PhraseQuery;

@@ -48,6 +49,7 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.support.QueryParsers;

 import java.io.IOException;
+import java.util.List;

 public class MatchQuery {

@@ -316,6 +318,21 @@ public class MatchQuery {
     public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) {
         final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop);
+        if (query instanceof GraphQuery) {
+            // we have a graph query, convert inner queries to multi phrase prefix queries
+            List<Query> oldQueries = ((GraphQuery) query).getQueries();
+            Query[] queries = new Query[oldQueries.size()];
+            for (int i = 0; i < queries.length; i++) {
+                queries[i] = toMultiPhrasePrefix(oldQueries.get(i), phraseSlop, maxExpansions);
+            }
+
+            return new GraphQuery(queries);
+        }
+
+        return toMultiPhrasePrefix(query, phraseSlop, maxExpansions);
+    }
+
+    private Query toMultiPhrasePrefix(final Query query, int phraseSlop, int maxExpansions) {
         float boost = 1;
         Query innerQuery = query;
         while (innerQuery instanceof BoostQuery) {

@@ -357,18 +374,38 @@ public class MatchQuery {
         Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
         if (booleanQuery != null && booleanQuery instanceof BooleanQuery) {
             BooleanQuery bq = (BooleanQuery) booleanQuery;
-            ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, (
-                (BooleanQuery) booleanQuery).isCoordDisabled(), fieldType);
-            for (BooleanClause clause : bq.clauses()) {
-                if (!(clause.getQuery() instanceof TermQuery)) {
-                    return booleanQuery;
-                }
-                query.add(((TermQuery) clause.getQuery()).getTerm());
-            }
-            return query;
-        }
-        return booleanQuery;
+            return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency, fieldType);
+        } else if (booleanQuery != null && booleanQuery instanceof GraphQuery && ((GraphQuery) booleanQuery).hasBoolean()) {
+            // we have a graph query that has at least one boolean sub-query
+            // re-build and use extended common terms
+            List<Query> oldQueries = ((GraphQuery) booleanQuery).getQueries();
+            Query[] queries = new Query[oldQueries.size()];
+            for (int i = 0; i < queries.length; i++) {
+                Query oldQuery = oldQueries.get(i);
+                if (oldQuery instanceof BooleanQuery) {
+                    queries[i] = boolToExtendedCommonTermsQuery((BooleanQuery) oldQuery, highFreqOccur, lowFreqOccur, maxTermFrequency, fieldType);
+                } else {
+                    queries[i] = oldQuery;
+                }
+            }
+
+            return new GraphQuery(queries);
+        }
+
+        return booleanQuery;
+    }
+
+    private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, Occur highFreqOccur, Occur lowFreqOccur, float
+            maxTermFrequency, MappedFieldType fieldType) {
+        ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency,
+            bq.isCoordDisabled(), fieldType);
+        for (BooleanClause clause : bq.clauses()) {
+            if (!(clause.getQuery() instanceof TermQuery)) {
+                return bq;
+            }
+            query.add(((TermQuery) clause.getQuery()).getTerm());
+        }
+        return query;
     }
 }
@@ -19,6 +19,7 @@
 package org.elasticsearch.index.shard;

+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;

@@ -71,6 +72,22 @@ public class ShardId implements Streamable, Comparable<ShardId> {
         return "[" + index.getName() + "][" + shardId + "]";
     }

+    /**
+     * Parse the string representation of this shardId back to an object.
+     * We lose index uuid information here, but since we use toString in
+     * rest responses, this is the best we can do to reconstruct the object
+     * on the client side.
+     */
+    public static ShardId fromString(String shardIdString) {
+        int splitPosition = shardIdString.indexOf("][");
+        if (splitPosition <= 0 || shardIdString.charAt(0) != '[' || shardIdString.charAt(shardIdString.length() - 1) != ']') {
+            throw new IllegalArgumentException("Unexpected shardId string format, expected [indexName][shardId] but got " + shardIdString);
+        }
+        String indexName = shardIdString.substring(1, splitPosition);
+        int shardId = Integer.parseInt(shardIdString.substring(splitPosition + 2, shardIdString.length() - 1));
+        return new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId);
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
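The new fromString is the inverse of toString's "[indexName][shardId]" form, minus the index UUID. A plain-Java sketch of the same string handling, using a made-up shard string:

-------------------------------------------------
public class ShardIdStringDemo {
    public static void main(String[] args) {
        String shardIdString = "[my-index][3]";          // hypothetical value of ShardId#toString()
        int splitPosition = shardIdString.indexOf("][");
        if (splitPosition <= 0 || shardIdString.charAt(0) != '['
                || shardIdString.charAt(shardIdString.length() - 1) != ']') {
            throw new IllegalArgumentException("Unexpected shardId string format: " + shardIdString);
        }
        String indexName = shardIdString.substring(1, splitPosition);
        int shardId = Integer.parseInt(shardIdString.substring(splitPosition + 2, shardIdString.length() - 1));
        System.out.println(indexName + " / " + shardId);  // prints: my-index / 3
        // The real method wraps these in new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), shardId),
        // so the UUID of the original index is not recoverable from the string form.
    }
}
-------------------------------------------------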
@@ -855,7 +855,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
                 in.readLong(); // ttl
             }
             this.versionType = VersionType.fromValue(in.readByte());
-            assert versionType.validateVersionForWrites(this.version);
+            assert versionType.validateVersionForWrites(this.version) : "invalid version for writes: " + this.version;
             if (format >= FORMAT_AUTO_GENERATED_IDS) {
                 this.autoGeneratedIdTimestamp = in.readLong();
             } else {

@@ -1036,8 +1036,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             this.versionType = VersionType.fromValue(in.readByte());
             assert versionType.validateVersionForWrites(this.version);
             if (format >= FORMAT_SEQ_NO) {
-                seqNo = in.readVLong();
-                primaryTerm = in.readVLong();
+                seqNo = in.readLong();
+                primaryTerm = in.readLong();
             }
         }

@@ -1100,8 +1100,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
             out.writeString(uid.text());
             out.writeLong(version);
             out.writeByte(versionType.getValue());
-            out.writeVLong(seqNo);
-            out.writeVLong(primaryTerm);
+            out.writeLong(seqNo);
+            out.writeLong(primaryTerm);
         }

         @Override
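Switching seqNo and primaryTerm from writeVLong/readVLong to writeLong/readLong looks cosmetic but is not: a variable-length long is only compact for small non-negative values, and sequence numbers can carry negative sentinel values (an unassigned sequence number, for instance), for which a 7-bits-per-byte encoding degenerates to its 10-byte worst case. A minimal sketch of that size behaviour, assuming the generic 7-bits-per-byte scheme rather than Elasticsearch's own StreamOutput:

-------------------------------------------------
public class VLongSizeDemo {
    // Bytes a value occupies in a 7-bits-per-byte variable-length encoding.
    static int vlongSize(long value) {
        int bytes = 1;
        while ((value & ~0x7FL) != 0) {  // more than 7 significant bits remain
            value >>>= 7;                // unsigned shift so negatives terminate
            bytes++;
        }
        return bytes;
    }

    public static void main(String[] args) {
        System.out.println(vlongSize(0));     // 1 byte
        System.out.println(vlongSize(1000));  // 2 bytes
        System.out.println(vlongSize(-2));    // 10 bytes: a negative sentinel defeats the encoding
        // A fixed writeLong/readLong is always 8 bytes and handles negative values naturally.
    }
}
-------------------------------------------------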
@@ -222,7 +222,7 @@ public class RecoverySourceHandler {
             final long numDocsSource = recoverySourceMetadata.getNumDocs();
             if (numDocsTarget != numDocsSource) {
                 throw new IllegalStateException("try to recover " + request.shardId() + " from primary shard with sync id but number " +
-                    "of docs differ: " + numDocsTarget + " (" + request.sourceNode().getName() + ", primary) vs " + numDocsSource
+                    "of docs differ: " + numDocsSource + " (" + request.sourceNode().getName() + ", primary) vs " + numDocsTarget
                     + "(" + request.targetNode().getName() + ")");
             }
             // we shortcut recovery here because we have nothing to copy. but we must still start the engine on the target.
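The swap above puts each document count next to the node it belongs to: with, say, 5 docs on the primary and 3 on the recovery target (node names made up), the message now reads "... number of docs differ: 5 (source_node, primary) vs 3(target_node)" instead of pairing the counts with the wrong nodes.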
@@ -119,7 +119,6 @@ import org.elasticsearch.repositories.RepositoriesModule;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchModule;
-import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.search.fetch.FetchPhase;
 import org.elasticsearch.snapshots.SnapshotShardsService;

@@ -379,8 +378,7 @@ public class Node implements Closeable {

             Collection<Object> pluginComponents = pluginsService.filterPlugins(Plugin.class).stream()
                 .flatMap(p -> p.createComponents(client, clusterService, threadPool, resourceWatcherService,
-                                                 scriptModule.getScriptService(), searchModule.getSearchRequestParsers(),
-                                                 xContentRegistry).stream())
+                                                 scriptModule.getScriptService(), xContentRegistry).stream())
                 .collect(Collectors.toList());
             Collection<UnaryOperator<Map<String, MetaData.Custom>>> customMetaDataUpgraders =
                 pluginsService.filterPlugins(Plugin.class).stream()

@@ -410,7 +408,6 @@ public class Node implements Closeable {
             final DiscoveryModule discoveryModule = new DiscoveryModule(this.settings, threadPool, transportService,
                 namedWriteableRegistry, networkService, clusterService, pluginsService.filterPlugins(DiscoveryPlugin.class));
             modules.add(b -> {
-                    b.bind(SearchRequestParsers.class).toInstance(searchModule.getSearchRequestParsers());
                     b.bind(NamedXContentRegistry.class).toInstance(xContentRegistry);
                     b.bind(PluginsService.class).toInstance(pluginsService);
                     b.bind(Client.class).toInstance(client);

@@ -463,7 +460,8 @@ public class Node implements Closeable {
                 .map(injector::getInstance).collect(Collectors.toList()));
             resourcesToClose.addAll(pluginLifecycleComponents);
             this.pluginLifecycleComponents = Collections.unmodifiableList(pluginLifecycleComponents);
-            client.initialize(injector.getInstance(new Key<Map<GenericAction, TransportAction>>() {}));
+            client.initialize(injector.getInstance(new Key<Map<GenericAction, TransportAction>>() {}),
+                    () -> clusterService.localNode().getId());

             logger.info("initialized");
@@ -42,7 +42,6 @@ import org.elasticsearch.repositories.RepositoriesModule;
 import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchModule;
-import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.watcher.ResourceWatcherService;

@@ -103,11 +102,10 @@ public abstract class Plugin implements Closeable {
      * @param threadPool A service to allow retrieving an executor to run an async action
      * @param resourceWatcherService A service to watch for changes to node local files
      * @param scriptService A service to allow running scripts on the local node
-     * @param searchRequestParsers Parsers for search requests which may be used to templatize search requests
      */
     public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                                ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                               SearchRequestParsers searchRequestParsers, NamedXContentRegistry xContentRegistry) {
+                                               NamedXContentRegistry xContentRegistry) {
         return Collections.emptyList();
     }
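For plugin authors, the visible effect of this change is the narrower createComponents signature. A minimal sketch of an override against the new signature; MyPlugin and MyComponent are made-up names, only the parameter list comes from the diff above:

-------------------------------------------------
import java.util.Collection;
import java.util.Collections;

import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;

public class MyPlugin extends Plugin {

    // Made-up component class, only here to have something to return.
    public static class MyComponent {
        private final ClusterService clusterService;

        public MyComponent(ClusterService clusterService) {
            this.clusterService = clusterService;
        }
    }

    @Override
    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                               NamedXContentRegistry xContentRegistry) {
        // Return whatever singletons the plugin wants injected; the SearchRequestParsers argument is gone.
        return Collections.singletonList(new MyComponent(clusterService));
    }
}
-------------------------------------------------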
@@ -176,8 +176,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp

     private static final String SNAPSHOT_CODEC = "snapshot";

-    static final String SNAPSHOTS_FILE = "index"; // package private for unit testing
-
     private static final String INDEX_FILE_PREFIX = "index-";

     private static final String INDEX_LATEST_BLOB = "index.latest";

@@ -373,7 +371,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             BlobPath indexPath = basePath().add("indices").add(indexId.getId());
             BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
             try {
-                indexMetaDataFormat(snapshot.version()).delete(indexMetaDataBlobContainer, snapshotId.getUUID());
+                indexMetaDataFormat.delete(indexMetaDataBlobContainer, snapshotId.getUUID());
             } catch (IOException ex) {
                 logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] failed to delete metadata for index [{}]", snapshotId, index), ex);
             }

@@ -421,7 +419,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             if (snapshotInfo != null) {
                 // we know the version the snapshot was created with
                 try {
-                    snapshotFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
+                    snapshotFormat.delete(snapshotsBlobContainer, blobId);
                 } catch (IOException e) {
                     logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] Unable to delete snapshot file [{}]", snapshotInfo.snapshotId(), blobId), e);
                 }

@@ -439,7 +437,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             if (snapshotInfo != null) {
                 // we know the version the snapshot was created with
                 try {
-                    globalMetaDataFormat(snapshotInfo.version()).delete(snapshotsBlobContainer, blobId);
+                    globalMetaDataFormat.delete(snapshotsBlobContainer, blobId);
                 } catch (IOException e) {
                     logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] Unable to delete global metadata file [{}]", snapshotInfo.snapshotId(), blobId), e);
                 }

@@ -522,7 +520,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             }
         }
         try {
-            metaData = globalMetaDataFormat(snapshotVersion).read(snapshotsBlobContainer, snapshotId.getUUID());
+            metaData = globalMetaDataFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
         } catch (NoSuchFileException ex) {
             throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
         } catch (IOException ex) {

@@ -533,7 +531,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             BlobPath indexPath = basePath().add("indices").add(index.getId());
             BlobContainer indexMetaDataBlobContainer = blobStore().blobContainer(indexPath);
             try {
-                metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, snapshotId.getUUID()), false);
+                metaDataBuilder.put(indexMetaDataFormat.read(indexMetaDataBlobContainer, snapshotId.getUUID()), false);
             } catch (ElasticsearchParseException | IOException ex) {
                 if (ignoreIndexErrors) {
                     logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] [{}] failed to read metadata for index", snapshotId, index.getName()), ex);

@@ -563,27 +561,6 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         }
     }

-    /**
-     * Returns appropriate global metadata format based on the provided version of the snapshot
-     */
-    private BlobStoreFormat<MetaData> globalMetaDataFormat(Version version) {
-        return globalMetaDataFormat;
-    }
-
-    /**
-     * Returns appropriate snapshot format based on the provided version of the snapshot
-     */
-    private BlobStoreFormat<SnapshotInfo> snapshotFormat(Version version) {
-        return snapshotFormat;
-    }
-
-    /**
-     * Returns appropriate index metadata format based on the provided version of the snapshot
-     */
-    private BlobStoreFormat<IndexMetaData> indexMetaDataFormat(Version version) {
-        return indexMetaDataFormat;
-    }
-
     @Override
     public long getSnapshotThrottleTimeInNanos() {
         return snapshotRateLimitingTimeInNanos.count();

@@ -643,6 +620,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
             // EMPTY is safe here because RepositoryData#fromXContent calls namedObject
             try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, out.bytes())) {
                 repositoryData = RepositoryData.snapshotsFromXContent(parser, indexGen);
+            } catch (NotXContentException e) {
+                logger.warn("[{}] index blob is not valid x-content [{} bytes]", snapshotsIndexBlobName, out.bytes().length());
+                throw e;
             }
         }
@@ -123,29 +123,21 @@ public class BytesRestResponse extends RestResponse {
         } else if (channel.detailedErrorsEnabled()) {
             final ToXContent.Params params;
             if (channel.request().paramAsBoolean("error_trace", !ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT)) {
-                params = new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request());
+                params = new ToXContent.DelegatingMapParams(
+                    Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_STACK_TRACE, "false"), channel.request());
             } else {
                 if (status.getStatus() < 500) {
-                    SUPPRESSED_ERROR_LOGGER.debug((Supplier<?>) () -> new ParameterizedMessage("path: {}, params: {}", channel.request().rawPath(), channel.request().params()), e);
+                    SUPPRESSED_ERROR_LOGGER.debug(
+                        (Supplier<?>) () -> new ParameterizedMessage("path: {}, params: {}",
+                            channel.request().rawPath(), channel.request().params()), e);
                 } else {
-                    SUPPRESSED_ERROR_LOGGER.warn((Supplier<?>) () -> new ParameterizedMessage("path: {}, params: {}", channel.request().rawPath(), channel.request().params()), e);
+                    SUPPRESSED_ERROR_LOGGER.warn(
+                        (Supplier<?>) () -> new ParameterizedMessage("path: {}, params: {}",
+                            channel.request().rawPath(), channel.request().params()), e);
                 }
                 params = channel.request();
             }
-            builder.field("error");
-            builder.startObject();
-            final ElasticsearchException[] rootCauses = ElasticsearchException.guessRootCauses(e);
-            builder.field("root_cause");
-            builder.startArray();
-            for (ElasticsearchException rootCause : rootCauses){
-                builder.startObject();
-                rootCause.toXContent(builder, new ToXContent.DelegatingMapParams(Collections.singletonMap(ElasticsearchException.REST_EXCEPTION_SKIP_CAUSE, "true"), params));
-                builder.endObject();
-            }
-            builder.endArray();
-
-            ElasticsearchException.toXContent(builder, params, e);
-            builder.endObject();
+            ElasticsearchException.renderException(builder, params, e);
         } else {
             builder.field("error", simpleMessage(e));
         }
@@ -37,7 +37,6 @@ import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;
-import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.search.builder.SearchSourceBuilder;

 import java.io.IOException;

@@ -53,12 +52,10 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
 public class RestMultiSearchAction extends BaseRestHandler {

     private final boolean allowExplicitIndex;
-    private final SearchRequestParsers searchRequestParsers;

     @Inject
-    public RestMultiSearchAction(Settings settings, RestController controller, SearchRequestParsers searchRequestParsers) {
+    public RestMultiSearchAction(Settings settings, RestController controller) {
         super(settings);
-        this.searchRequestParsers = searchRequestParsers;

         controller.registerHandler(GET, "/_msearch", this);
         controller.registerHandler(POST, "/_msearch", this);

@@ -72,7 +69,7 @@ public class RestMultiSearchAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, searchRequestParsers, parseFieldMatcher);
+        MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex, parseFieldMatcher);
         return channel -> client.multiSearch(multiSearchRequest, new RestToXContentListener<>(channel));
     }

@@ -80,7 +77,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
      * Parses a {@link RestRequest} body and returns a {@link MultiSearchRequest}
      */
     public static MultiSearchRequest parseRequest(RestRequest restRequest, boolean allowExplicitIndex,
-                                                  SearchRequestParsers searchRequestParsers,
                                                   ParseFieldMatcher parseFieldMatcher) throws IOException {
         MultiSearchRequest multiRequest = new MultiSearchRequest();
         if (restRequest.hasParam("max_concurrent_searches")) {
@@ -36,7 +36,6 @@ import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestActions;
 import org.elasticsearch.rest.action.RestStatusToXContentListener;
 import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

@@ -54,13 +53,9 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
 import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;

 public class RestSearchAction extends BaseRestHandler {
-
-    private final SearchRequestParsers searchRequestParsers;
-
     @Inject
-    public RestSearchAction(Settings settings, RestController controller, SearchRequestParsers searchRequestParsers) {
+    public RestSearchAction(Settings settings, RestController controller) {
         super(settings);
-        this.searchRequestParsers = searchRequestParsers;
         controller.registerHandler(GET, "/_search", this);
         controller.registerHandler(POST, "/_search", this);
         controller.registerHandler(GET, "/{index}/_search", this);

@@ -73,7 +68,7 @@ public class RestSearchAction extends BaseRestHandler {
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
         SearchRequest searchRequest = new SearchRequest();
         request.withContentOrSourceParamParserOrNull(parser ->
-            parseSearchRequest(searchRequest, request, searchRequestParsers, parseFieldMatcher, parser));
+            parseSearchRequest(searchRequest, request, parseFieldMatcher, parser));

         return channel -> client.search(searchRequest, new RestStatusToXContentListener<>(channel));
     }

@@ -84,8 +79,8 @@ public class RestSearchAction extends BaseRestHandler {
      * @param requestContentParser body of the request to read. This method does not attempt to read the body from the {@code request}
      *        parameter
      */
-    public static void parseSearchRequest(SearchRequest searchRequest, RestRequest request, SearchRequestParsers searchRequestParsers,
-            ParseFieldMatcher parseFieldMatcher, XContentParser requestContentParser) throws IOException {
+    public static void parseSearchRequest(SearchRequest searchRequest, RestRequest request, ParseFieldMatcher parseFieldMatcher,
+            XContentParser requestContentParser) throws IOException {

         if (searchRequest.source() == null) {
             searchRequest.source(new SearchSourceBuilder());
@@ -24,7 +24,7 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;

 import java.util.Map;

@@ -34,7 +34,7 @@ import java.util.Map;
  *
  * @see SearchHits
  */
-public interface SearchHit extends Streamable, ToXContent, Iterable<SearchHitField> {
+public interface SearchHit extends Streamable, ToXContentObject, Iterable<SearchHitField> {

     /**
      * The score.
@@ -276,7 +276,6 @@ public class SearchModule {
     private final Settings settings;
     private final List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>();
     private final List<NamedXContentRegistry.Entry> namedXContents = new ArrayList<>();
-    private final SearchRequestParsers searchRequestParsers;

     public SearchModule(Settings settings, boolean transportClient, List<SearchPlugin> plugins) {
         this.settings = settings;

@@ -295,7 +294,6 @@ public class SearchModule {
         registerFetchSubPhases(plugins);
         registerSearchExts(plugins);
         registerShapes();
-        searchRequestParsers = new SearchRequestParsers();
     }

     public List<NamedWriteableRegistry.Entry> getNamedWriteables() {

@@ -306,10 +304,6 @@ public class SearchModule {
         return namedXContents;
     }

-    public SearchRequestParsers getSearchRequestParsers() {
-        return searchRequestParsers;
-    }
-
     /**
      * Returns the {@link Highlighter} registry
      */
@@ -1,29 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search;
-
-/**
- * A container for all parsers used to parse
- * {@link org.elasticsearch.action.search.SearchRequest} objects from a rest request.
- */
-public class SearchRequestParsers {
-    public SearchRequestParsers() {
-    }
-}
@@ -22,7 +22,6 @@ package org.elasticsearch.search;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.Index;

@@ -35,21 +34,20 @@ import java.io.IOException;
  */
 public class SearchShardTarget implements Writeable, Comparable<SearchShardTarget> {

-    private Text nodeId;
-    private Text index;
-    private ShardId shardId;
+    private final Text nodeId;
+    private final ShardId shardId;

     public SearchShardTarget(StreamInput in) throws IOException {
         if (in.readBoolean()) {
             nodeId = in.readText();
+        } else {
+            nodeId = null;
         }
         shardId = ShardId.readShardId(in);
-        index = new Text(shardId.getIndexName());
     }

     public SearchShardTarget(String nodeId, ShardId shardId) {
         this.nodeId = nodeId == null ? null : new Text(nodeId);
-        this.index = new Text(shardId.getIndexName());
         this.shardId = shardId;
     }

@@ -58,33 +56,16 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge
     }

     @Nullable
-    public String nodeId() {
+    public String getNodeId() {
         return nodeId.string();
     }

-    @Nullable
-    public String getNodeId() {
-        return nodeId();
-    }
-
-    public Text nodeIdText() {
+    public Text getNodeIdText() {
         return this.nodeId;
     }

-    public String index() {
-        return index.string();
-    }
-
     public String getIndex() {
-        return index();
-    }
-
-    public Text indexText() {
-        return this.index;
-    }
-
-    public ShardId shardId() {
-        return shardId;
+        return shardId.getIndexName();
     }

     public ShardId getShardId() {

@@ -93,7 +74,7 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge

     @Override
     public int compareTo(SearchShardTarget o) {
-        int i = index.string().compareTo(o.index());
+        int i = shardId.getIndexName().compareTo(o.getIndex());
         if (i == 0) {
             i = shardId.getId() - o.shardId.id();
         }

@@ -125,7 +106,7 @@ public class SearchShardTarget implements Writeable, Comparable<SearchShardTarge
     @Override
     public int hashCode() {
         int result = nodeId != null ? nodeId.hashCode() : 0;
-        result = 31 * result + (index != null ? index.hashCode() : 0);
+        result = 31 * result + (shardId.getIndexName() != null ? shardId.getIndexName().hashCode() : 0);
         result = 31 * result + shardId.hashCode();
         return result;
     }
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

@@ -40,17 +39,16 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB
      * Constructs a new aggregation builder.
      *
      * @param name  The aggregation name
-     * @param type  The aggregation type
      */
-    public AbstractAggregationBuilder(String name, Type type) {
-        super(name, type);
+    public AbstractAggregationBuilder(String name) {
+        super(name);
     }

     /**
      * Read from a stream.
      */
-    protected AbstractAggregationBuilder(StreamInput in, Type type) throws IOException {
-        super(in.readString(), type);
+    protected AbstractAggregationBuilder(StreamInput in) throws IOException {
+        super(in.readString());
         factoriesBuilder = new AggregatorFactories.Builder(in);
         metaData = in.readMap();
     }

@@ -118,8 +116,9 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB
     }

     @Override
-    public String getType() {
-        return type.name();
+    public final String getWriteableName() {
+        // We always use the type of the aggregation as the writeable name
+        return getType();
     }

     @Override

@@ -138,7 +137,7 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB
         if (this.metaData != null) {
             builder.field("meta", this.metaData);
         }
-        builder.field(type.name());
+        builder.field(getType());
         internalXContent(builder, params);

         if (factoriesBuilder != null && (factoriesBuilder.count()) > 0) {

@@ -154,7 +153,7 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB

     @Override
     public int hashCode() {
-        return Objects.hash(factoriesBuilder, metaData, name, type, doHashCode());
+        return Objects.hash(factoriesBuilder, metaData, name, doHashCode());
     }

     protected abstract int doHashCode();

@@ -169,8 +168,6 @@ public abstract class AbstractAggregationBuilder<AB extends AbstractAggregationB
         AbstractAggregationBuilder<AB> other = (AbstractAggregationBuilder<AB>) obj;
         if (!Objects.equals(name, other.name))
             return false;
-        if (!Objects.equals(type, other.type))
-            return false;
         if (!Objects.equals(metaData, other.metaData))
             return false;
         if (!Objects.equals(factoriesBuilder, other.factoriesBuilder))
@@ -24,7 +24,6 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

@@ -38,24 +37,18 @@ public abstract class AggregationBuilder
         implements NamedWriteable, ToXContent, BaseAggregationBuilder {

     protected final String name;
-    protected final Type type;
     protected AggregatorFactories.Builder factoriesBuilder = AggregatorFactories.builder();

     /**
      * Constructs a new aggregation builder.
      *
      * @param name  The aggregation name
-     * @param type  The aggregation type
      */
-    protected AggregationBuilder(String name, Type type) {
+    protected AggregationBuilder(String name) {
         if (name == null) {
             throw new IllegalArgumentException("[name] must not be null: [" + name + "]");
         }
-        if (type == null) {
-            throw new IllegalArgumentException("[type] must not be null: [" + name + "]");
-        }
         this.name = name;
-        this.type = type;
     }

     /** Return this aggregation's name. */
@@ -24,7 +24,6 @@ import org.apache.lucene.search.Scorer;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.ObjectArray;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;

@@ -163,7 +162,6 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> {
     }

     protected final String name;
-    protected final Type type;
     protected final AggregatorFactory<?> parent;
     protected final AggregatorFactories factories;
     protected final Map<String, Object> metaData;

@@ -174,15 +172,12 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> {
      *
      * @param name
      *            The aggregation name
-     * @param type
-     *            The aggregation type
      * @throws IOException
      *             if an error occurs creating the factory
      */
-    public AggregatorFactory(String name, Type type, SearchContext context, AggregatorFactory<?> parent,
+    public AggregatorFactory(String name, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         this.name = name;
-        this.type = type;
         this.context = context;
         this.parent = parent;
         this.factories = subFactoriesBuilder.build(context, this);

@@ -226,10 +221,6 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> {
         return createInternal(parent, collectsFromSingleBucket, this.factories.createPipelineAggregators(), this.metaData);
     }

-    public String getType() {
-        return type.name();
-    }
-
     public AggregatorFactory<?> getParent() {
         return parent;
     }
|
||||||
* An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations.
|
* An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations.
|
||||||
*/
|
*/
|
||||||
public abstract class InternalAggregation implements Aggregation, ToXContent, NamedWriteable {
|
public abstract class InternalAggregation implements Aggregation, ToXContent, NamedWriteable {
|
||||||
/**
|
|
||||||
* The aggregation type that holds all the string types that are associated with an aggregation:
|
|
||||||
* <ul>
|
|
||||||
* <li>name - used as the parser type</li>
|
|
||||||
* </ul>
|
|
||||||
*/
|
|
||||||
public static class Type {
|
|
||||||
private final String name;
|
|
||||||
|
|
||||||
public Type(String name) {
|
|
||||||
this.name = name;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @return The name of the type of aggregation. This is the key for parsing the aggregation from XContent and is the name of the
|
|
||||||
* aggregation's builder when serialized.
|
|
||||||
*/
|
|
||||||
public String name() {
|
|
||||||
return name;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
return name;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class ReduceContext {
|
public static class ReduceContext {
|
||||||
|
|
||||||
private final BigArrays bigArrays;
|
private final BigArrays bigArrays;
|
||||||
|
|
|
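The removed Type wrapper stores nothing beyond the string it is constructed with and echoes it back from both name() and toString(), so dropping it loses no information. A small runnable check of that point (the nested Type class mirrors the code removed above; everything else is illustration only):

// Illustration only; the nested Type class mirrors the wrapper removed in the hunk above.
public class TypeWrapperDemo {
    static class Type {
        private final String name;
        Type(String name) { this.name = name; }
        public String name() { return name; }
        @Override public String toString() { return name; }
    }

    public static void main(String[] args) {
        Type type = new Type("filter");
        // Everything the wrapper exposes is the original string, which is why a plain
        // NAME constant plus getType() can replace it throughout the builders below.
        System.out.println(type.name());      // filter
        System.out.println(type.toString());  // filter
    }
}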
@@ -31,7 +31,6 @@ import org.elasticsearch.index.mapper.ParentFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.FieldContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild;

@@ -46,7 +45,6 @@ import java.util.Objects;

 public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<ParentChild, ChildrenAggregationBuilder> {
     public static final String NAME = "children";
-    private static final Type TYPE = new Type(NAME);

     private String parentType;
     private final String childType;

@@ -60,7 +58,7 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<P
      *            the type of children documents
      */
     public ChildrenAggregationBuilder(String name, String childType) {
-        super(name, TYPE, ValuesSourceType.BYTES, ValueType.STRING);
+        super(name, ValuesSourceType.BYTES, ValueType.STRING);
         if (childType == null) {
             throw new IllegalArgumentException("[childType] must not be null: [" + name + "]");
         }

@@ -71,7 +69,7 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<P
      * Read from a stream.
      */
     public ChildrenAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.BYTES, ValueType.STRING);
+        super(in, ValuesSourceType.BYTES, ValueType.STRING);
         childType = in.readString();
     }

@@ -83,7 +81,7 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<P
     @Override
     protected ValuesSourceAggregatorFactory<ParentChild, ?> innerBuild(SearchContext context,
             ValuesSourceConfig<ParentChild> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new ChildrenAggregatorFactory(name, type, config, parentType, childFilter, parentFilter, context, parent,
+        return new ChildrenAggregatorFactory(name, config, parentType, childFilter, parentFilter, context, parent,
                 subFactoriesBuilder, metaData);
     }

@@ -163,7 +161,7 @@ public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<P
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
@@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;

@@ -44,10 +43,10 @@ public class ChildrenAggregatorFactory
     private final Query parentFilter;
     private final Query childFilter;

-    public ChildrenAggregatorFactory(String name, Type type, ValuesSourceConfig<ParentChild> config, String parentType, Query childFilter,
+    public ChildrenAggregatorFactory(String name, ValuesSourceConfig<ParentChild> config, String parentType, Query childFilter,
             Query parentFilter, SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.parentType = parentType;
         this.childFilter = childFilter;
         this.parentFilter = parentFilter;
@@ -27,7 +27,6 @@ import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

@@ -35,7 +34,6 @@ import java.util.Objects;

 public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterAggregationBuilder> {
     public static final String NAME = "filter";
-    private static final Type TYPE = new Type(NAME);

     private final QueryBuilder filter;

@@ -48,7 +46,7 @@ public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterA
      * {@link Filter} aggregation.
      */
     public FilterAggregationBuilder(String name, QueryBuilder filter) {
-        super(name, TYPE);
+        super(name);
         if (filter == null) {
             throw new IllegalArgumentException("[filter] must not be null: [" + name + "]");
         }

@@ -59,7 +57,7 @@ public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterA
      * Read from a stream.
      */
     public FilterAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE);
+        super(in);
         filter = in.readNamedWriteable(QueryBuilder.class);
     }

@@ -73,7 +71,7 @@ public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterA
             AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
         // TODO this sucks we need a rewrite phase for aggregations too
         final QueryBuilder rewrittenFilter = QueryBuilder.rewriteQuery(filter, context.getQueryShardContext());
-        return new FilterAggregatorFactory(name, type, rewrittenFilter, context, parent, subFactoriesBuilder, metaData);
+        return new FilterAggregatorFactory(name, rewrittenFilter, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -101,7 +99,7 @@ public class FilterAggregationBuilder extends AbstractAggregationBuilder<FilterA
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
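Callers of FilterAggregationBuilder are unaffected: the public constructor shown above keeps its (String, QueryBuilder) signature and only the super(...) call loses the TYPE argument. A hedged usage sketch based on that constructor; the aggregation name and the match-all query are placeholder values, and the import paths are assumed from the 5.x package layout rather than shown in this diff:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;

class FilterAggUsageSketch {
    static FilterAggregationBuilder example() {
        // Public API unchanged by this refactor; "my_filter" is a placeholder name.
        FilterAggregationBuilder filterAgg =
                new FilterAggregationBuilder("my_filter", QueryBuilders.matchAllQuery());
        // The aggregation's string identity, previously exposed as getWriteableName().
        assert "filter".equals(filterAgg.getType());
        return filterAgg;
    }
}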
@@ -26,7 +26,6 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.internal.SearchContext;

@@ -38,9 +37,9 @@ public class FilterAggregatorFactory extends AggregatorFactory<FilterAggregatorF

     private final Weight weight;

-    public FilterAggregatorFactory(String name, Type type, QueryBuilder filterBuilder, SearchContext context,
+    public FilterAggregatorFactory(String name, QueryBuilder filterBuilder, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, context, parent, subFactoriesBuilder, metaData);
+        super(name, context, parent, subFactoriesBuilder, metaData);
         IndexSearcher contextSearcher = context.searcher();
         Query filter = filterBuilder.toQuery(context.getQueryShardContext());
         weight = contextSearcher.createNormalizedWeight(filter, false);
@@ -30,7 +30,6 @@ import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
 import org.elasticsearch.search.internal.SearchContext;

@@ -43,7 +42,6 @@ import java.util.Objects;

 public class FiltersAggregationBuilder extends AbstractAggregationBuilder<FiltersAggregationBuilder> {
     public static final String NAME = "filters";
-    private static final Type TYPE = new Type(NAME);

     private static final ParseField FILTERS_FIELD = new ParseField("filters");
     private static final ParseField OTHER_BUCKET_FIELD = new ParseField("other_bucket");

@@ -65,7 +63,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
     }

     private FiltersAggregationBuilder(String name, List<KeyedFilter> filters) {
-        super(name, TYPE);
+        super(name);
         // internally we want to have a fixed order of filters, regardless of the order of the filters in the request
         this.filters = new ArrayList<>(filters);
         Collections.sort(this.filters, (KeyedFilter kf1, KeyedFilter kf2) -> kf1.key().compareTo(kf2.key()));

@@ -79,7 +77,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
      *            the filters to use with this aggregation
      */
     public FiltersAggregationBuilder(String name, QueryBuilder... filters) {
-        super(name, TYPE);
+        super(name);
         List<KeyedFilter> keyedFilters = new ArrayList<>(filters.length);
         for (int i = 0; i < filters.length; i++) {
             keyedFilters.add(new KeyedFilter(String.valueOf(i), filters[i]));

@@ -92,7 +90,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
      * Read from a stream.
      */
     public FiltersAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE);
+        super(in);
         keyed = in.readBoolean();
         int filtersSize = in.readVInt();
         filters = new ArrayList<>(filtersSize);

@@ -176,7 +174,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
             rewrittenFilters.add(new KeyedFilter(kf.key(), QueryBuilder.rewriteQuery(kf.filter(),
                     context.getQueryShardContext())));
         }
-        return new FiltersAggregatorFactory(name, type, rewrittenFilters, keyed, otherBucket, otherBucketKey, context, parent,
+        return new FiltersAggregatorFactory(name, rewrittenFilters, keyed, otherBucket, otherBucketKey, context, parent,
                 subFactoriesBuilder, metaData);
     }

@@ -300,7 +298,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
@@ -25,7 +25,6 @@ import org.apache.lucene.search.Weight;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.internal.SearchContext;

@@ -42,10 +41,10 @@ public class FiltersAggregatorFactory extends AggregatorFactory<FiltersAggregato
     private final boolean otherBucket;
     private final String otherBucketKey;

-    public FiltersAggregatorFactory(String name, Type type, List<KeyedFilter> filters, boolean keyed, boolean otherBucket,
+    public FiltersAggregatorFactory(String name, List<KeyedFilter> filters, boolean keyed, boolean otherBucket,
             String otherBucketKey, SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, context, parent, subFactories, metaData);
+        super(name, context, parent, subFactories, metaData);
         this.keyed = keyed;
         this.otherBucket = otherBucket;
         this.otherBucketKey = otherBucketKey;
@@ -35,7 +35,6 @@ import org.elasticsearch.index.fielddata.SortingNumericDocValues;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.bucket.BucketUtils;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;

@@ -51,7 +50,6 @@ import java.util.Objects;

 public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoGridAggregationBuilder> {
     public static final String NAME = "geohash_grid";
-    private static final Type TYPE = new Type(NAME);
     public static final int DEFAULT_PRECISION = 5;
     public static final int DEFAULT_MAX_NUM_CELLS = 10000;

@@ -73,14 +71,14 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<Va
     private int shardSize = -1;

     public GeoGridAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(name, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
     public GeoGridAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(in, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
         precision = in.readVInt();
         requiredSize = in.readVInt();
         shardSize = in.readVInt();

@@ -150,7 +148,7 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<Va
         if (shardSize < requiredSize) {
             shardSize = requiredSize;
         }
-        return new GeoHashGridAggregatorFactory(name, type, config, precision, requiredSize, shardSize, context, parent,
+        return new GeoHashGridAggregatorFactory(name, config, precision, requiredSize, shardSize, context, parent,
                 subFactoriesBuilder, metaData);
     }

@@ -185,7 +183,7 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<Va
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
@@ -23,7 +23,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

@@ -44,10 +43,10 @@ public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory<
     private final int requiredSize;
     private final int shardSize;

-    public GeoHashGridAggregatorFactory(String name, Type type, ValuesSourceConfig<GeoPoint> config, int precision, int requiredSize,
+    public GeoHashGridAggregatorFactory(String name, ValuesSourceConfig<GeoPoint> config, int precision, int requiredSize,
             int shardSize, SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.precision = precision;
         this.requiredSize = requiredSize;
         this.shardSize = shardSize;
@@ -26,24 +26,22 @@ import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

 public class GlobalAggregationBuilder extends AbstractAggregationBuilder<GlobalAggregationBuilder> {
     public static final String NAME = "global";
-    private static final Type TYPE = new Type(NAME);

     public GlobalAggregationBuilder(String name) {
-        super(name, TYPE);
+        super(name);
     }

     /**
      * Read from a stream.
      */
     public GlobalAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE);
+        super(in);
     }

     @Override

@@ -54,7 +52,7 @@ public class GlobalAggregationBuilder extends AbstractAggregationBuilder<GlobalA
     @Override
     protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
             throws IOException {
-        return new GlobalAggregatorFactory(name, type, context, parent, subFactoriesBuilder, metaData);
+        return new GlobalAggregatorFactory(name, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -80,7 +78,7 @@ public class GlobalAggregationBuilder extends AbstractAggregationBuilder<GlobalA
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
@@ -23,7 +23,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.internal.SearchContext;

@@ -33,9 +32,9 @@ import java.util.Map;

 public class GlobalAggregatorFactory extends AggregatorFactory<GlobalAggregatorFactory> {

-    public GlobalAggregatorFactory(String name, Type type, SearchContext context, AggregatorFactory<?> parent,
+    public GlobalAggregatorFactory(String name, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
-        super(name, type, context, parent, subFactories, metaData);
+        super(name, context, parent, subFactories, metaData);
     }

     @Override
@@ -32,7 +32,8 @@ import java.util.Map;
  * regardless the query.
  */
 public class InternalGlobal extends InternalSingleBucketAggregation implements Global {
-    InternalGlobal(String name, long docCount, InternalAggregations aggregations, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+    InternalGlobal(String name, long docCount, InternalAggregations aggregations, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) {
         super(name, docCount, aggregations, pipelineAggregators, metaData);
     }
@@ -54,7 +54,7 @@ import static java.util.Collections.unmodifiableMap;
  */
 public class DateHistogramAggregationBuilder
         extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, DateHistogramAggregationBuilder> {
-    public static final String NAME = InternalDateHistogram.TYPE.name();
+    public static final String NAME = "date_histogram";

     public static final Map<String, DateTimeUnit> DATE_FIELD_UNITS;

@@ -131,12 +131,12 @@ public class DateHistogramAggregationBuilder

     /** Create a new builder with the given name. */
     public DateHistogramAggregationBuilder(String name) {
-        super(name, InternalDateHistogram.TYPE, ValuesSourceType.NUMERIC, ValueType.DATE);
+        super(name, ValuesSourceType.NUMERIC, ValueType.DATE);
     }

     /** Read from a stream, for internal use only. */
     public DateHistogramAggregationBuilder(StreamInput in) throws IOException {
-        super(in, InternalDateHistogram.TYPE, ValuesSourceType.NUMERIC, ValueType.DATE);
+        super(in, ValuesSourceType.NUMERIC, ValueType.DATE);
         if (in.readBoolean()) {
             order = InternalOrder.Streams.readOrder(in);
         }

@@ -315,7 +315,7 @@ public class DateHistogramAggregationBuilder
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }

@@ -328,7 +328,7 @@ public class DateHistogramAggregationBuilder
             // parse any string bounds to longs and round
             roundedBounds = this.extendedBounds.parseAndValidate(name, context, config.format()).round(rounding);
         }
-        return new DateHistogramAggregatorFactory(name, type, config, interval, dateHistogramInterval, offset, order, keyed, minDocCount,
+        return new DateHistogramAggregatorFactory(name, config, interval, dateHistogramInterval, offset, order, keyed, minDocCount,
                 rounding, roundedBounds, context, parent, subFactoriesBuilder, metaData);
     }
@@ -23,7 +23,6 @@ import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;

@@ -47,11 +46,11 @@ public final class DateHistogramAggregatorFactory
     private final ExtendedBounds extendedBounds;
     private Rounding rounding;

-    public DateHistogramAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, long interval,
+    public DateHistogramAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, long interval,
             DateHistogramInterval dateHistogramInterval, long offset, InternalOrder order, boolean keyed, long minDocCount,
             Rounding rounding, ExtendedBounds extendedBounds, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.interval = interval;
         this.dateHistogramInterval = dateHistogramInterval;
         this.offset = offset;
@@ -49,7 +49,7 @@ import java.util.Objects;
  */
 public class HistogramAggregationBuilder
         extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, HistogramAggregationBuilder> {
-    public static final String NAME = InternalHistogram.TYPE.name();
+    public static final String NAME = "histogram";

     private static final ObjectParser<double[], ParseFieldMatcherSupplier> EXTENDED_BOUNDS_PARSER = new ObjectParser<>(
             Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName(),

@@ -94,12 +94,12 @@ public class HistogramAggregationBuilder

     /** Create a new builder with the given name. */
     public HistogramAggregationBuilder(String name) {
-        super(name, InternalHistogram.TYPE, ValuesSourceType.NUMERIC, ValueType.DOUBLE);
+        super(name, ValuesSourceType.NUMERIC, ValueType.DOUBLE);
     }

     /** Read from a stream, for internal use only. */
     public HistogramAggregationBuilder(StreamInput in) throws IOException {
-        super(in, InternalHistogram.TYPE, ValuesSourceType.NUMERIC, ValueType.DOUBLE);
+        super(in, ValuesSourceType.NUMERIC, ValueType.DOUBLE);
         if (in.readBoolean()) {
             order = InternalOrder.Streams.readOrder(in);
         }

@@ -260,14 +260,14 @@ public class HistogramAggregationBuilder
     }

     @Override
-    public String getWriteableName() {
-        return InternalHistogram.TYPE.name();
+    public String getType() {
+        return NAME;
     }

     @Override
     protected ValuesSourceAggregatorFactory<Numeric, ?> innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new HistogramAggregatorFactory(name, type, config, interval, offset, order, keyed, minDocCount, minBound, maxBound,
+        return new HistogramAggregatorFactory(name, config, interval, offset, order, keyed, minDocCount, minBound, maxBound,
                 context, parent, subFactoriesBuilder, metaData);
     }
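In this file the NAME constant changes together with the override: NAME was previously derived as InternalHistogram.TYPE.name(), and the InternalHistogram hunk further below shows TYPE was constructed as new Type("histogram"), so the old derived value and the new literal are the same string. A tiny check with the constants inlined by hand, for illustration only:

// Illustration only: the rename from getWriteableName() to getType() and the switch from
// InternalHistogram.TYPE.name() to the NAME literal do not change the returned string.
class HistogramNameCheck {
    public static void main(String[] args) {
        String before = "histogram";   // value of InternalHistogram.TYPE.name(), with TYPE = new Type("histogram")
        String after = "histogram";    // value of the new NAME constant returned by getType()
        System.out.println(before.equals(after));   // true
    }
}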
@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;

@@ -42,11 +41,11 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
     private final long minDocCount;
     private final double minBound, maxBound;

-    HistogramAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, double interval, double offset,
+    HistogramAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, double interval, double offset,
             InternalOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
             SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.interval = interval;
         this.offset = offset;
         this.order = order;
@@ -43,13 +43,11 @@ import java.util.ListIterator;
 import java.util.Map;

 /**
- * Imelementation of {@link Histogram}.
+ * Implementation of {@link Histogram}.
  */
 public final class InternalDateHistogram extends InternalMultiBucketAggregation<InternalDateHistogram, InternalDateHistogram.Bucket>
         implements Histogram, HistogramFactory {

-    static final Type TYPE = new Type("date_histogram");
-
     public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket {

         final long key;
@@ -40,13 +40,10 @@ import java.util.ListIterator;
 import java.util.Map;

 /**
- * Imelementation of {@link Histogram}.
+ * Implementation of {@link Histogram}.
  */
 public final class InternalHistogram extends InternalMultiBucketAggregation<InternalHistogram, InternalHistogram.Bucket>
         implements Histogram, HistogramFactory {

-    static final Type TYPE = new Type("histogram");
-
     public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket {

         final double key;
@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

@@ -40,7 +39,6 @@ import java.io.IOException;

 public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, MissingAggregationBuilder> {
     public static final String NAME = "missing";
-    public static final Type TYPE = new Type(NAME);

     private static final ObjectParser<MissingAggregationBuilder, QueryParseContext> PARSER;
     static {

@@ -53,14 +51,14 @@ public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<Va
     }

     public MissingAggregationBuilder(String name, ValueType targetValueType) {
-        super(name, TYPE, ValuesSourceType.ANY, targetValueType);
+        super(name, ValuesSourceType.ANY, targetValueType);
     }

     /**
      * Read from a stream.
      */
     public MissingAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.ANY);
+        super(in, ValuesSourceType.ANY);
     }

     @Override

@@ -76,7 +74,7 @@ public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<Va
     @Override
     protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(SearchContext context,
             ValuesSourceConfig<ValuesSource> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new MissingAggregatorFactory(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        return new MissingAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -95,7 +93,7 @@ public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<Va
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.missing;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;

@@ -35,9 +34,9 @@ import java.util.Map;

 public class MissingAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource, MissingAggregatorFactory> {

-    public MissingAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, SearchContext context,
+    public MissingAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

@@ -38,7 +37,6 @@ import java.util.Objects;

 public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedAggregationBuilder> {
     public static final String NAME = "nested";
-    private static final Type TYPE = new Type(NAME);

     private final String path;

@@ -50,7 +48,7 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
      *            match the path to a nested object in the mappings.
      */
     public NestedAggregationBuilder(String name, String path) {
-        super(name, TYPE);
+        super(name);
         if (path == null) {
             throw new IllegalArgumentException("[path] must not be null: [" + name + "]");
         }

@@ -61,7 +59,7 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
      * Read from a stream.
      */
     public NestedAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE);
+        super(in);
         path = in.readString();
     }

@@ -83,7 +81,7 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
         ObjectMapper childObjectMapper = context.getObjectMapper(path);
         if (childObjectMapper == null) {
             // in case the path has been unmapped:
-            return new NestedAggregatorFactory(name, type, null, null, context, parent, subFactoriesBuilder, metaData);
+            return new NestedAggregatorFactory(name, null, null, context, parent, subFactoriesBuilder, metaData);
         }

         if (childObjectMapper.nested().isNested() == false) {

@@ -91,7 +89,7 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
         }
         try {
             ObjectMapper parentObjectMapper = context.getQueryShardContext().nestedScope().nextLevel(childObjectMapper);
-            return new NestedAggregatorFactory(name, type, parentObjectMapper, childObjectMapper, context, parent, subFactoriesBuilder,
+            return new NestedAggregatorFactory(name, parentObjectMapper, childObjectMapper, context, parent, subFactoriesBuilder,
                     metaData);
         } finally {
             context.getQueryShardContext().nestedScope().previousLevel();

@@ -148,7 +146,7 @@ public class NestedAggregationBuilder extends AbstractAggregationBuilder<NestedA
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }
@@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;
@@ -38,10 +37,10 @@ public class NestedAggregatorFactory extends AggregatorFactory<NestedAggregatorF
private final ObjectMapper parentObjectMapper;
private final ObjectMapper childObjectMapper;

-public NestedAggregatorFactory(String name, Type type, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper,
+public NestedAggregatorFactory(String name, ObjectMapper parentObjectMapper, ObjectMapper childObjectMapper,
SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories,
Map<String, Object> metaData) throws IOException {
-super(name, type, context, parent, subFactories, metaData);
+super(name, context, parent, subFactories, metaData);
this.parentObjectMapper = parentObjectMapper;
this.childObjectMapper = childObjectMapper;
}

@@ -32,7 +32,6 @@ import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
@@ -40,19 +39,18 @@ import java.util.Objects;

public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder<ReverseNestedAggregationBuilder> {
public static final String NAME = "reverse_nested";
-private static final Type TYPE = new Type(NAME);

private String path;

public ReverseNestedAggregationBuilder(String name) {
-super(name, TYPE);
+super(name);
}

/**
* Read from a stream.
*/
public ReverseNestedAggregationBuilder(StreamInput in) throws IOException {
-super(in, TYPE);
+super(in);
path = in.readOptionalString();
}

@@ -93,7 +91,7 @@ public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder<
if (path != null) {
parentObjectMapper = context.getObjectMapper(path);
if (parentObjectMapper == null) {
-return new ReverseNestedAggregatorFactory(name, type, true, null, context, parent, subFactoriesBuilder, metaData);
+return new ReverseNestedAggregatorFactory(name, true, null, context, parent, subFactoriesBuilder, metaData);
}
if (parentObjectMapper.nested().isNested() == false) {
throw new AggregationExecutionException("[reverse_nested] nested path [" + path + "] is not nested");
@@ -103,7 +101,7 @@ public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder<
NestedScope nestedScope = context.getQueryShardContext().nestedScope();
try {
nestedScope.nextLevel(parentObjectMapper);
-return new ReverseNestedAggregatorFactory(name, type, false, parentObjectMapper, context, parent, subFactoriesBuilder,
+return new ReverseNestedAggregatorFactory(name, false, parentObjectMapper, context, parent, subFactoriesBuilder,
metaData);
} finally {
nestedScope.previousLevel();
@@ -172,7 +170,7 @@ public class ReverseNestedAggregationBuilder extends AbstractAggregationBuilder<
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}
}

@@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;
@@ -38,11 +37,11 @@ public class ReverseNestedAggregatorFactory extends AggregatorFactory<ReverseNes
private final boolean unmapped;
private final ObjectMapper parentObjectMapper;

-public ReverseNestedAggregatorFactory(String name, Type type, boolean unmapped, ObjectMapper parentObjectMapper,
+public ReverseNestedAggregatorFactory(String name, boolean unmapped, ObjectMapper parentObjectMapper,
SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactories,
Map<String, Object> metaData) throws IOException {
-super(name, type, context, parent, subFactories, metaData);
+super(name, context, parent, subFactories, metaData);
this.unmapped = unmapped;
this.parentObjectMapper = parentObjectMapper;
}

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -43,10 +42,10 @@ public class AbstractRangeAggregatorFactory<AF extends AbstractRangeAggregatorFa
private final R[] ranges;
private final boolean keyed;

-public AbstractRangeAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, R[] ranges, boolean keyed,
+public AbstractRangeAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, R[] ranges, boolean keyed,
InternalRange.Factory<?, ?> rangeFactory, SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+super(name, config, context, parent, subFactoriesBuilder, metaData);
this.ranges = ranges;
this.keyed = keyed;
this.rangeFactory = rangeFactory;

@@ -44,7 +44,7 @@ public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R
protected boolean keyed = false;

protected AbstractRangeBuilder(String name, InternalRange.Factory<?, ?> rangeFactory) {
-super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
+super(name, rangeFactory.getValueSourceType(), rangeFactory.getValueType());
this.rangeFactory = rangeFactory;
}

@@ -53,7 +53,7 @@ public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R
*/
protected AbstractRangeBuilder(StreamInput in, InternalRange.Factory<?, ?> rangeFactory, Writeable.Reader<R> rangeReader)
throws IOException {
-super(in, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
+super(in, rangeFactory.getValueSourceType(), rangeFactory.getValueType());
this.rangeFactory = rangeFactory;
ranges = in.readList(rangeReader);
keyed = in.readBoolean();

@@ -18,33 +18,32 @@
*/
package org.elasticsearch.search.aggregations.bucket.range;

-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;

+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
public class BinaryRangeAggregatorFactory
extends ValuesSourceAggregatorFactory<ValuesSource.Bytes, BinaryRangeAggregatorFactory> {

private final List<BinaryRangeAggregator.Range> ranges;
private final boolean keyed;

-public BinaryRangeAggregatorFactory(String name, Type type,
+public BinaryRangeAggregatorFactory(String name,
ValuesSourceConfig<ValuesSource.Bytes> config,
List<BinaryRangeAggregator.Range> ranges, boolean keyed,
SearchContext context,
AggregatorFactory<?> parent, Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
-super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+super(name, config, context, parent, subFactoriesBuilder, metaData);
this.ranges = ranges;
this.keyed = keyed;
}

@@ -175,10 +175,6 @@ public class InternalRange<B extends InternalRange.Bucket, R extends InternalRan
}

public static class Factory<B extends Bucket, R extends InternalRange<B, R>> {
-public Type type() {
-return RangeAggregationBuilder.TYPE;
-}
-
public ValuesSourceType getValueSourceType() {
return ValuesSourceType.NUMERIC;
}

@@ -23,10 +23,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
@@ -37,7 +36,6 @@ import java.io.IOException;

public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregationBuilder, Range> {
public static final String NAME = "range";
-static final Type TYPE = new Type(NAME);

private static final ObjectParser<RangeAggregationBuilder, QueryParseContext> PARSER;
static {
@@ -142,12 +140,12 @@ public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregati
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
// We need to call processRanges here so they are parsed before we make the decision of whether to cache the request
Range[] ranges = processRanges(context, config);
-return new RangeAggregatorFactory(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder,
+return new RangeAggregatorFactory(name, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder,
metaData);
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}
}

@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.range;

import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange.Factory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -33,10 +32,10 @@ import java.util.Map;

public class RangeAggregatorFactory extends AbstractRangeAggregatorFactory<RangeAggregatorFactory, RangeAggregator.Range> {

-public RangeAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, Range[] ranges, boolean keyed,
+public RangeAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, Range[] ranges, boolean keyed,
Factory<?, ?> rangeFactory, SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-super(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, metaData);
+super(name, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, metaData);
}

}

@@ -23,10 +23,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
@@ -40,7 +39,6 @@ import java.io.IOException;

public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
public static final String NAME = "date_range";
-static final Type TYPE = new Type(NAME);

private static final ObjectParser<DateRangeAggregationBuilder, QueryParseContext> PARSER;
static {
@@ -75,7 +73,7 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}

@@ -288,7 +286,7 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
// We need to call processRanges here so they are parsed and we know whether `now` has been used before we make
// the decision of whether to cache the request
Range[] ranges = processRanges(context, config);
-return new DateRangeAggregatorFactory(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder,
+return new DateRangeAggregatorFactory(name, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder,
metaData);
}
}

@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.range.date;

import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange.Factory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
@@ -34,10 +33,10 @@ import java.util.Map;

public class DateRangeAggregatorFactory extends AbstractRangeAggregatorFactory<DateRangeAggregatorFactory, Range> {

-public DateRangeAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, Range[] ranges, boolean keyed,
+public DateRangeAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, Range[] ranges, boolean keyed,
Factory<?, ?> rangeFactory, SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-super(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, metaData);
+super(name, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, metaData);
}

}

@@ -84,11 +84,6 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
}

public static class Factory extends InternalRange.Factory<InternalDateRange.Bucket, InternalDateRange> {
-@Override
-public Type type() {
-return DateRangeAggregationBuilder.TYPE;
-}
-
@Override
public ValueType getValueType() {
return ValueType.DATE;

@@ -34,7 +34,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
@@ -51,7 +50,6 @@ import java.util.Objects;

public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoDistanceAggregationBuilder> {
public static final String NAME = "geo_distance";
-public static final Type TYPE = new Type(NAME);
static final ParseField ORIGIN_FIELD = new ParseField("origin", "center", "point", "por");
static final ParseField UNIT_FIELD = new ParseField("unit");
static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
@@ -215,7 +213,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde

private GeoDistanceAggregationBuilder(String name, GeoPoint origin,
InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> rangeFactory) {
-super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
+super(name, rangeFactory.getValueSourceType(), rangeFactory.getValueType());
this.origin = origin;
}

@@ -223,8 +221,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
* Read from a stream.
*/
public GeoDistanceAggregationBuilder(StreamInput in) throws IOException {
-super(in, InternalGeoDistance.FACTORY.type(), InternalGeoDistance.FACTORY.getValueSourceType(),
-InternalGeoDistance.FACTORY.getValueType());
+super(in, InternalGeoDistance.FACTORY.getValueSourceType(), InternalGeoDistance.FACTORY.getValueType());
origin = new GeoPoint(in.readDouble(), in.readDouble());
int size = in.readVInt();
ranges = new ArrayList<>(size);
@@ -345,7 +342,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}

@@ -387,7 +384,7 @@ public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilde
ValuesSourceConfig<ValuesSource.GeoPoint> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
throws IOException {
Range[] ranges = this.ranges.toArray(new Range[this.range().size()]);
-return new GeoDistanceRangeAggregatorFactory(name, type, config, origin, ranges, unit, distanceType, keyed, context, parent,
+return new GeoDistanceRangeAggregatorFactory(name, config, origin, ranges, unit, distanceType, keyed, context, parent,
subFactoriesBuilder, metaData);
}

@@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Unmapped;
@@ -56,10 +55,10 @@ public class GeoDistanceRangeAggregatorFactory
private final GeoDistance distanceType;
private final boolean keyed;

-public GeoDistanceRangeAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource.GeoPoint> config, GeoPoint origin,
+public GeoDistanceRangeAggregatorFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> config, GeoPoint origin,
Range[] ranges, DistanceUnit unit, GeoDistance distanceType, boolean keyed, SearchContext context,
AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+super(name, config, context, parent, subFactoriesBuilder, metaData);
this.origin = origin;
this.ranges = ranges;
this.unit = unit;

@@ -59,11 +59,6 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
}

public static class Factory extends InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> {
-@Override
-public Type type() {
-return GeoDistanceAggregationBuilder.TYPE;
-}
-
@Override
public ValuesSourceType getValueSourceType() {
return ValuesSourceType.GEOPOINT;

@@ -35,7 +35,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
@@ -60,7 +59,6 @@ import java.util.Objects;
public final class IpRangeAggregationBuilder
extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, IpRangeAggregationBuilder> {
public static final String NAME = "ip_range";
-private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME);
private static final ParseField MASK_FIELD = new ParseField("mask");

private static final ObjectParser<IpRangeAggregationBuilder, QueryParseContext> PARSER;
@@ -233,11 +231,11 @@ public final class IpRangeAggregationBuilder
private List<Range> ranges = new ArrayList<>();

public IpRangeAggregationBuilder(String name) {
-super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP);
+super(name, ValuesSourceType.BYTES, ValueType.IP);
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}

@@ -339,7 +337,7 @@ public final class IpRangeAggregationBuilder
}

public IpRangeAggregationBuilder(StreamInput in) throws IOException {
-super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP);
+super(in, ValuesSourceType.BYTES, ValueType.IP);
final int numRanges = in.readVInt();
for (int i = 0; i < numRanges; ++i) {
addRange(new Range(in));
@@ -374,7 +372,7 @@ public final class IpRangeAggregationBuilder
for (Range range : this.ranges) {
ranges.add(new BinaryRangeAggregator.Range(range.key, toBytesRef(range.from), toBytesRef(range.to)));
}
-return new BinaryRangeAggregatorFactory(name, TYPE, config, ranges,
+return new BinaryRangeAggregatorFactory(name, config, ranges,
keyed, context, parent, subFactoriesBuilder, metaData);
}

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@@ -41,7 +40,6 @@ import java.util.Objects;

public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, DiversifiedAggregationBuilder> {
public static final String NAME = "diversified_sampler";
-public static final Type TYPE = new Type(NAME);

public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;

@@ -63,14 +61,14 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
private String executionHint = null;

public DiversifiedAggregationBuilder(String name) {
-super(name, TYPE, ValuesSourceType.ANY, null);
+super(name, ValuesSourceType.ANY, null);
}

/**
* Read from a stream.
*/
public DiversifiedAggregationBuilder(StreamInput in) throws IOException {
-super(in, TYPE, ValuesSourceType.ANY, null);
+super(in, ValuesSourceType.ANY, null);
shardSize = in.readVInt();
maxDocsPerValue = in.readVInt();
executionHint = in.readOptionalString();
@@ -139,7 +137,7 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
@Override
protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(SearchContext context,
ValuesSourceConfig<ValuesSource> config, AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-return new DiversifiedAggregatorFactory(name, TYPE, config, shardSize, maxDocsPerValue, executionHint, context, parent,
+return new DiversifiedAggregatorFactory(name, config, shardSize, maxDocsPerValue, executionHint, context, parent,
subFactoriesBuilder, metaData);
}

@@ -167,7 +165,7 @@ public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilde
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}
}

@@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.NonCollectingAggregator;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator.ExecutionMode;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -44,10 +43,10 @@ public class DiversifiedAggregatorFactory extends ValuesSourceAggregatorFactory<
private final int maxDocsPerValue;
private final String executionHint;

-public DiversifiedAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, int shardSize, int maxDocsPerValue,
+public DiversifiedAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, int shardSize, int maxDocsPerValue,
String executionHint, SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metaData) throws IOException {
-super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+super(name, config, context, parent, subFactoriesBuilder, metaData);
this.shardSize = shardSize;
this.maxDocsPerValue = maxDocsPerValue;
this.executionHint = executionHint;

@@ -28,7 +28,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
@@ -36,21 +35,20 @@ import java.util.Objects;

public class SamplerAggregationBuilder extends AbstractAggregationBuilder<SamplerAggregationBuilder> {
public static final String NAME = "sampler";
-private static final Type TYPE = new Type(NAME);

public static final int DEFAULT_SHARD_SAMPLE_SIZE = 100;

private int shardSize = DEFAULT_SHARD_SAMPLE_SIZE;

public SamplerAggregationBuilder(String name) {
-super(name, TYPE);
+super(name);
}

/**
* Read from a stream.
*/
public SamplerAggregationBuilder(StreamInput in) throws IOException {
-super(in, TYPE);
+super(in);
shardSize = in.readVInt();
}

@@ -77,7 +75,7 @@ public class SamplerAggregationBuilder extends AbstractAggregationBuilder<Sample
@Override
protected SamplerAggregatorFactory doBuild(SearchContext context, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
throws IOException {
-return new SamplerAggregatorFactory(name, type, shardSize, context, parent, subFactoriesBuilder, metaData);
+return new SamplerAggregatorFactory(name, shardSize, context, parent, subFactoriesBuilder, metaData);
}

@Override
@@ -129,7 +127,7 @@ public class SamplerAggregationBuilder extends AbstractAggregationBuilder<Sample
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}
}

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.bucket.sampler;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext;

@@ -34,9 +33,9 @@ public class SamplerAggregatorFactory extends AggregatorFactory<SamplerAggregato

private final int shardSize;

-public SamplerAggregatorFactory(String name, Type type, int shardSize, SearchContext context, AggregatorFactory<?> parent,
+public SamplerAggregatorFactory(String name, int shardSize, SearchContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
-super(name, type, context, parent, subFactories, metaData);
+super(name, context, parent, subFactories, metaData);
this.shardSize = shardSize;
}

@@ -26,12 +26,10 @@ import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
@@ -53,7 +51,6 @@ import java.util.Objects;

public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, SignificantTermsAggregationBuilder> {
public static final String NAME = "significant_terms";
-public static final InternalAggregation.Type TYPE = new Type(NAME);

static final ParseField BACKGROUND_FILTER = new ParseField("background_filter");
static final ParseField HEURISTIC = new ParseField("significance_heuristic");
@@ -111,14 +108,14 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;

public SignificantTermsAggregationBuilder(String name, ValueType valueType) {
-super(name, TYPE, ValuesSourceType.ANY, valueType);
+super(name, ValuesSourceType.ANY, valueType);
}

/**
* Read from a Stream.
*/
public SignificantTermsAggregationBuilder(StreamInput in) throws IOException {
-super(in, TYPE, ValuesSourceType.ANY);
+super(in, ValuesSourceType.ANY);
bucketCountThresholds = new BucketCountThresholds(in);
executionHint = in.readOptionalString();
filterBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
@@ -267,7 +264,7 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource> config,
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
SignificanceHeuristic executionHeuristic = this.significanceHeuristic.rewrite(context);
-return new SignificantTermsAggregatorFactory(name, type, config, includeExclude, executionHint, filterBuilder,
+return new SignificantTermsAggregatorFactory(name, config, includeExclude, executionHint, filterBuilder,
bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, metaData);
}

@@ -303,7 +300,7 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
}

@Override
-public String getWriteableName() {
+public String getType() {
return NAME;
}
}

@@ -40,7 +40,6 @@ import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.BucketUtils;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
@@ -71,11 +70,11 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
     private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
     private final SignificanceHeuristic significanceHeuristic;

-    public SignificantTermsAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, IncludeExclude includeExclude,
+    public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, IncludeExclude includeExclude,
             String executionHint, QueryBuilder filterBuilder, TermsAggregator.BucketCountThresholds bucketCountThresholds,
             SignificanceHeuristic significanceHeuristic, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.includeExclude = includeExclude;
         this.executionHint = executionHint;
         this.filter = filterBuilder == null

@@ -22,10 +22,12 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

@@ -152,4 +154,32 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu
     protected Bucket[] createBucketsArray(int size) {
         return new Bucket[size];
     }
+
+    @Override
+    public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
+        boolean promoteToDouble = false;
+        for (InternalAggregation agg : aggregations) {
+            if (agg instanceof LongTerms && ((LongTerms) agg).format == DocValueFormat.RAW) {
+                /**
+                 * this terms agg mixes longs and doubles, we must promote longs to doubles to make the internal aggs
+                 * compatible
+                 */
+                promoteToDouble = true;
+                break;
+            }
+        }
+        if (promoteToDouble == false) {
+            return super.doReduce(aggregations, reduceContext);
+        }
+        List<InternalAggregation> newAggs = new ArrayList<>();
+        for (InternalAggregation agg : aggregations) {
+            if (agg instanceof LongTerms) {
+                DoubleTerms dTerms = LongTerms.convertLongTermsToDouble((LongTerms) agg, format);
+                newAggs.add(dTerms);
+            } else {
+                newAggs.add(agg);
+            }
+        }
+        return newAggs.get(0).doReduce(newAggs, reduceContext);
+    }
 }

@@ -22,10 +22,12 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

@@ -152,4 +154,32 @@ public class LongTerms extends InternalMappedTerms<LongTerms, LongTerms.Bucket>
     protected Bucket[] createBucketsArray(int size) {
         return new Bucket[size];
     }
+
+    @Override
+    public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
+        for (InternalAggregation agg : aggregations) {
+            if (agg instanceof DoubleTerms) {
+                return agg.doReduce(aggregations, reduceContext);
+            }
+        }
+        return super.doReduce(aggregations, reduceContext);
+    }
+
+    /**
+     * Converts a {@link LongTerms} into a {@link DoubleTerms}, returning the value of the specified long terms as doubles.
+     */
+    static DoubleTerms convertLongTermsToDouble(LongTerms longTerms, DocValueFormat decimalFormat) {
+        List<Terms.Bucket> buckets = longTerms.getBuckets();
+        List<DoubleTerms.Bucket> newBuckets = new ArrayList<>();
+        for (Terms.Bucket bucket : buckets) {
+            newBuckets.add(new DoubleTerms.Bucket(bucket.getKeyAsNumber().doubleValue(),
+                bucket.getDocCount(), (InternalAggregations) bucket.getAggregations(), longTerms.showTermDocCountError,
+                longTerms.showTermDocCountError ? bucket.getDocCountError() : 0, decimalFormat));
+        }
+        return new DoubleTerms(longTerms.getName(), longTerms.order, longTerms.requiredSize,
+            longTerms.minDocCount, longTerms.pipelineAggregators(),
+            longTerms.metaData, longTerms.format, longTerms.shardSize,
+            longTerms.showTermDocCountError, longTerms.otherDocCount,
+            newBuckets, longTerms.docCountError);
+    }
 }

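The two hunks above (DoubleTerms and LongTerms) add a reduce-time promotion path: when a DoubleTerms result is reduced together with raw-formatted LongTerms results from other shards, the long-keyed buckets are converted to double-keyed ones first so that every bucket shares a single key type. The standalone Java sketch below uses simplified, hypothetical stand-in types (LongResult/DoubleResult, not the real InternalAggregation classes) to illustrate that promote-then-merge idea under those assumptions:

-------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Minimal sketch of the reduce-time promotion idea: if any per-shard terms
 * result carries double keys, widen every long-keyed result to doubles
 * before merging. The types here are illustrative stand-ins only.
 */
public class MixedKeyReduceSketch {

    /** Hypothetical per-shard result keyed by longs. */
    static final class LongResult {
        final List<Long> keys;
        LongResult(List<Long> keys) { this.keys = keys; }
    }

    /** Hypothetical per-shard result keyed by doubles. */
    static final class DoubleResult {
        final List<Double> keys;
        DoubleResult(List<Double> keys) { this.keys = keys; }
    }

    /** Mirrors the conversion step: widen every long key to a double. */
    static DoubleResult toDouble(LongResult longResult) {
        List<Double> keys = new ArrayList<>();
        for (long key : longResult.keys) {
            keys.add((double) key);
        }
        return new DoubleResult(keys);
    }

    /** Mirrors the reduce step: promote longs only when doubles are present. */
    static List<? extends Number> reduce(List<Object> shardResults) {
        boolean promoteToDouble = false;
        for (Object result : shardResults) {
            if (result instanceof DoubleResult) {
                promoteToDouble = true;
                break;
            }
        }
        if (promoteToDouble == false) {
            // All shards agree on long keys: merge them without conversion.
            List<Long> merged = new ArrayList<>();
            for (Object result : shardResults) {
                merged.addAll(((LongResult) result).keys);
            }
            return merged;
        }
        // Mixed keys: promote every long-keyed result, then merge as doubles.
        List<Double> merged = new ArrayList<>();
        for (Object result : shardResults) {
            if (result instanceof LongResult) {
                merged.addAll(toDouble((LongResult) result).keys);
            } else {
                merged.addAll(((DoubleResult) result).keys);
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        List<Object> shards = new ArrayList<>();
        shards.add(new LongResult(Arrays.asList(1L, 2L)));
        shards.add(new DoubleResult(Arrays.asList(2.5, 3.0)));
        System.out.println(reduce(shards)); // prints [1.0, 2.0, 2.5, 3.0]
    }
}
-------------------------------------------------

Reducing one long-keyed shard result with one double-keyed shard result yields a single double-keyed list, which mirrors the effect of the doReduce overrides added in the hunks above.
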
@@ -26,12 +26,10 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
@@ -50,7 +48,6 @@ import java.util.Objects;

 public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, TermsAggregationBuilder> {
     public static final String NAME = "terms";
-    private static final InternalAggregation.Type TYPE = new Type("terms");

     public static final ParseField EXECUTION_HINT_FIELD_NAME = new ParseField("execution_hint");
     public static final ParseField SHARD_SIZE_FIELD_NAME = new ParseField("shard_size");
@@ -108,14 +105,14 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
     private boolean showTermDocCountError = false;

     public TermsAggregationBuilder(String name, ValueType valueType) {
-        super(name, TYPE, ValuesSourceType.ANY, valueType);
+        super(name, ValuesSourceType.ANY, valueType);
     }

     /**
      * Read from a stream.
      */
     public TermsAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.ANY);
+        super(in, ValuesSourceType.ANY);
         bucketCountThresholds = new BucketCountThresholds(in);
         collectMode = in.readOptionalWriteable(SubAggCollectionMode::readFromStream);
         executionHint = in.readOptionalString();
@@ -287,7 +284,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
     @Override
     protected ValuesSourceAggregatorFactory<ValuesSource, ?> innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new TermsAggregatorFactory(name, type, config, order, includeExclude, executionHint, collectMode,
+        return new TermsAggregatorFactory(name, config, order, includeExclude, executionHint, collectMode,
                 bucketCountThresholds, showTermDocCountError, context, parent, subFactoriesBuilder, metaData);
     }

@@ -326,7 +323,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }

@@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
 import org.elasticsearch.search.aggregations.bucket.BucketUtils;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
@@ -52,11 +51,11 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
     private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
     private boolean showTermDocCountError;

-    public TermsAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, Terms.Order order,
+    public TermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, Terms.Order order,
             IncludeExclude includeExclude, String executionHint, SubAggCollectionMode collectMode,
             TermsAggregator.BucketCountThresholds bucketCountThresholds, boolean showTermDocCountError, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.order = order;
         this.includeExclude = includeExclude;
         this.executionHint = executionHint;

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -41,7 +40,6 @@ import java.io.IOException;

 public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregationBuilder> {
     public static final String NAME = "avg";
-    private static final Type TYPE = new Type(NAME);

     private static final ObjectParser<AvgAggregationBuilder, QueryParseContext> PARSER;
     static {
@@ -54,14 +52,14 @@ public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     public AvgAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(name, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
     public AvgAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(in, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     @Override
@@ -72,7 +70,7 @@ public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     @Override
     protected AvgAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new AvgAggregatorFactory(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        return new AvgAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -91,7 +89,7 @@ public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.avg;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -36,9 +35,9 @@ import java.util.Map;

 public class AvgAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, AvgAggregatorFactory> {

-    public AvgAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, SearchContext context,
+    public AvgAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -25,10 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@@ -44,7 +43,6 @@ public final class CardinalityAggregationBuilder
         extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, CardinalityAggregationBuilder> {

     public static final String NAME = "cardinality";
-    private static final Type TYPE = new Type(NAME);

     private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed");
     public static final ParseField PRECISION_THRESHOLD_FIELD = new ParseField("precision_threshold");
@@ -64,14 +62,14 @@ public final class CardinalityAggregationBuilder
     private Long precisionThreshold = null;

     public CardinalityAggregationBuilder(String name, ValueType targetValueType) {
-        super(name, TYPE, ValuesSourceType.ANY, targetValueType);
+        super(name, ValuesSourceType.ANY, targetValueType);
     }

     /**
      * Read from a stream.
      */
     public CardinalityAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.ANY);
+        super(in, ValuesSourceType.ANY);
         if (in.readBoolean()) {
             precisionThreshold = in.readLong();
         }
@@ -124,7 +122,7 @@ public final class CardinalityAggregationBuilder
     @Override
     protected CardinalityAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new CardinalityAggregatorFactory(name, type, config, precisionThreshold, context, parent, subFactoriesBuilder, metaData);
+        return new CardinalityAggregatorFactory(name, config, precisionThreshold, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -147,7 +145,7 @@ public final class CardinalityAggregationBuilder
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@@ -37,10 +36,10 @@ public class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<

     private final Long precisionThreshold;

-    public CardinalityAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource> config, Long precisionThreshold,
+    public CardinalityAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, Long precisionThreshold,
             SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.precisionThreshold = precisionThreshold;
     }

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@@ -41,7 +40,6 @@ import java.util.Objects;

 public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoBoundsAggregationBuilder> {
     public static final String NAME = "geo_bounds";
-    private static final Type TYPE = new Type(NAME);

     private static final ObjectParser<GeoBoundsAggregationBuilder, QueryParseContext> PARSER;
     static {
@@ -57,14 +55,14 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
     private boolean wrapLongitude = true;

     public GeoBoundsAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(name, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
     public GeoBoundsAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(in, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
         wrapLongitude = in.readBoolean();
     }

@@ -91,7 +89,7 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
     @Override
     protected GeoBoundsAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource.GeoPoint> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new GeoBoundsAggregatorFactory(name, type, config, wrapLongitude, context, parent, subFactoriesBuilder, metaData);
+        return new GeoBoundsAggregatorFactory(name, config, wrapLongitude, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -112,7 +110,7 @@ public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.geobounds;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@@ -37,10 +36,10 @@ public class GeoBoundsAggregatorFactory extends ValuesSourceAggregatorFactory<Va

     private final boolean wrapLongitude;

-    public GeoBoundsAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource.GeoPoint> config, boolean wrapLongitude,
+    public GeoBoundsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> config, boolean wrapLongitude,
             SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
         this.wrapLongitude = wrapLongitude;
     }

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@@ -41,7 +40,6 @@ import java.io.IOException;
 public class GeoCentroidAggregationBuilder
         extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregationBuilder> {
     public static final String NAME = "geo_centroid";
-    public static final Type TYPE = new Type(NAME);

     private static final ObjectParser<GeoCentroidAggregationBuilder, QueryParseContext> PARSER;
     static {
@@ -54,14 +52,14 @@ public class GeoCentroidAggregationBuilder
     }

     public GeoCentroidAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(name, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
     public GeoCentroidAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
+        super(in, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     @Override
@@ -72,7 +70,7 @@ public class GeoCentroidAggregationBuilder
     @Override
     protected GeoCentroidAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<ValuesSource.GeoPoint> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new GeoCentroidAggregatorFactory(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        return new GeoCentroidAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -91,7 +89,7 @@ public class GeoCentroidAggregationBuilder
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
@@ -35,10 +34,10 @@ import java.util.Map;

 public class GeoCentroidAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.GeoPoint, GeoCentroidAggregatorFactory> {

-    public GeoCentroidAggregatorFactory(String name, Type type, ValuesSourceConfig<ValuesSource.GeoPoint> config,
+    public GeoCentroidAggregatorFactory(String name, ValuesSourceConfig<ValuesSource.GeoPoint> config,
             SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -41,7 +40,6 @@ import java.io.IOException;

 public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregationBuilder> {
     public static final String NAME = "max";
-    public static final Type TYPE = new Type(NAME);

     private static final ObjectParser<MaxAggregationBuilder, QueryParseContext> PARSER;
     static {
@@ -54,14 +52,14 @@ public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     public MaxAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(name, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
     public MaxAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(in, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     @Override
@@ -72,7 +70,7 @@ public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     @Override
     protected MaxAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new MaxAggregatorFactory(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        return new MaxAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -91,7 +89,7 @@ public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.max;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -36,9 +35,9 @@ import java.util.Map;

 public class MaxAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, MaxAggregatorFactory> {

-    public MaxAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, SearchContext context,
+    public MaxAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

@@ -24,10 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
@@ -42,7 +41,6 @@ import java.io.IOException;

 public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MinAggregationBuilder> {
     public static final String NAME = "min";
-    private static final Type TYPE = new Type(NAME);

     private static final ObjectParser<MinAggregationBuilder, QueryParseContext> PARSER;
     static {
@@ -55,14 +53,14 @@ public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     public MinAggregationBuilder(String name) {
-        super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(name, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
     public MinAggregationBuilder(StreamInput in) throws IOException {
-        super(in, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
+        super(in, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     @Override
@@ -73,7 +71,7 @@ public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     @Override
     protected MinAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
             AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new MinAggregatorFactory(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        return new MinAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override
@@ -92,7 +90,7 @@ public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOn
     }

     @Override
-    public String getWriteableName() {
+    public String getType() {
         return NAME;
     }
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.metrics.min;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
@@ -36,9 +35,9 @@ import java.util.Map;

 public class MinAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, MinAggregatorFactory> {

-    public MinAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config, SearchContext context,
+    public MinAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        super(name, config, context, parent, subFactoriesBuilder, metaData);
     }

     @Override

Some files were not shown because too many files have changed in this diff.