Extend field stats:
* Add isSearchable and isAggregatable (collapsed to true if any of the instances of that field are searchable or aggregatable).
* Accept wildcards in field names.
* Add a section named conflicts for fields with the same name but with incompatible types (instead of throwing an exception).
This commit is contained in:
parent 4f9929d439
commit 573c4f3ed1
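
To make the merge semantics above concrete, here is a minimal, self-contained sketch (illustrative Java only, not the actual FieldStats classes; the names MergedFieldStats and FieldStatsMerger are made up for this example): per-field flags are OR-ed together so that searchable/aggregatable collapse to true if any instance of the field has them, and a type mismatch is recorded in a conflicts map instead of failing the whole request.

import java.util.HashMap;
import java.util.Map;

// Hypothetical illustration of the merge rules described in the commit message.
class MergedFieldStats {
    final byte type;        // e.g. whole-number, floating-point, date, text, ip
    boolean searchable;     // true if ANY instance of the field is searchable
    boolean aggregatable;   // true if ANY instance of the field is aggregatable

    MergedFieldStats(byte type, boolean searchable, boolean aggregatable) {
        this.type = type;
        this.searchable = searchable;
        this.aggregatable = aggregatable;
    }
}

class FieldStatsMerger {
    final Map<String, MergedFieldStats> merged = new HashMap<>();
    // field name -> human-readable description, reported in the response instead of throwing
    final Map<String, String> conflicts = new HashMap<>();

    void add(String field, MergedFieldStats shardStats) {
        MergedFieldStats existing = merged.get(field);
        if (existing == null) {
            merged.put(field, shardStats);
        } else if (existing.type != shardStats.type) {
            // same field name, incompatible types: record a conflict entry once
            conflicts.putIfAbsent(field, "field [" + field + "] is of incompatible types in the requested indices");
        } else {
            // collapse the flags: true wins if any instance is searchable/aggregatable
            existing.searchable |= shardStats.searchable;
            existing.aggregatable |= shardStats.aggregatable;
        }
    }
}

The real logic lives in FieldStats.accumulate(...) and TransportFieldStatsTransportAction in the diff below.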
@@ -156,11 +156,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]DeleteRequest.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]TransportDeleteAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]explain[/\\]TransportExplainAction.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStats.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequest.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequestBuilder.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsResponse.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]TransportFieldStatsTransportAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]GetRequest.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetRequest.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportGetAction.java" checks="LineLength" />
@@ -617,7 +612,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestPendingClusterTasksAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]fieldstats[/\\]RestFieldStatsAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]get[/\\]RestMultiGetAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]index[/\\]RestIndexAction.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]main[/\\]RestMainAction.java" checks="LineLength" />
@@ -828,7 +822,6 @@
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkProcessorIT.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequestTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]RetryTests.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequestTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetShardRequestTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]BulkRequestModifierTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]IngestProxyActionFilterTests.java" checks="LineLength" />
@@ -987,8 +980,6 @@
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]EnvironmentTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]NodeEnvironmentTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsIntegrationIT.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayModuleTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayServiceTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayTests.java" checks="LineLength" />
@@ -24,41 +24,48 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.StringHelper;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
+import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 import java.net.InetAddress;
-import java.net.UnknownHostException;
 
-public abstract class FieldStats<T> implements Streamable, ToXContent {
+public abstract class FieldStats<T> implements Writeable, ToXContent {
     private final byte type;
     private long maxDoc;
     private long docCount;
     private long sumDocFreq;
     private long sumTotalTermFreq;
+    private boolean isSearchable;
+    private boolean isAggregatable;
     protected T minValue;
     protected T maxValue;
 
-    protected FieldStats(int type) {
-        this.type = (byte) type;
+    FieldStats(byte type, long maxDoc, boolean isSearchable, boolean isAggregatable) {
+        this(type, maxDoc, 0, 0, 0, isSearchable, isAggregatable, null, null);
     }
 
-    protected FieldStats(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq) {
-        this.type = (byte) type;
+    FieldStats(byte type,
+               long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
+               boolean isSearchable, boolean isAggregatable, T minValue, T maxValue) {
+        this.type = type;
         this.maxDoc = maxDoc;
         this.docCount = docCount;
         this.sumDocFreq = sumDocFreq;
         this.sumTotalTermFreq = sumTotalTermFreq;
+        this.isSearchable = isSearchable;
+        this.isAggregatable = isAggregatable;
+        this.minValue = minValue;
+        this.maxValue = maxValue;
     }
 
     byte getType() {
-        return type;
+        return this.type;
     }
 
     /**
@@ -71,7 +78,8 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
     }
 
     /**
-     * @return the number of documents that have at least one term for this field, or -1 if this measurement isn't available.
+     * @return the number of documents that have at least one term for this field,
+     * or -1 if this measurement isn't available.
      *
      * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account.
      */
@@ -102,7 +110,8 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
     }
 
     /**
-     * @return the sum of the term frequencies of all terms in this field across all documents, or -1 if this measurement
+     * @return the sum of the term frequencies of all terms in this field across all documents,
+     * or -1 if this measurement
      * isn't available. Term frequency is the total number of occurrences of a term in a particular document and field.
      *
      * Note that, documents marked as deleted that haven't yet been merged way aren't taken into account.
@@ -111,6 +120,20 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
         return sumTotalTermFreq;
     }
 
+    /**
+     * @return <code>true</code> if any of the instances of the field name is searchable.
+     */
+    public boolean isSearchable() {
+        return isSearchable;
+    }
+
+    /**
+     * @return <code>true</code> if any of the instances of the field name is aggregatable.
+     */
+    public boolean isAggregatable() {
+        return isAggregatable;
+    }
+
     /**
      * @return the lowest value in the field.
      *
@@ -152,33 +175,96 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
     protected abstract T valueOf(String value, String optionalFormat);
 
     /**
-     * Merges the provided stats into this stats instance.
+     * Accumulates the provided stats into this stats instance.
      */
-    public void append(FieldStats stats) {
-        this.maxDoc += stats.maxDoc;
-        if (stats.docCount == -1) {
+    public final void accumulate(FieldStats other) {
+        this.maxDoc += other.maxDoc;
+        if (other.docCount == -1) {
             this.docCount = -1;
         } else if (this.docCount != -1) {
-            this.docCount += stats.docCount;
+            this.docCount += other.docCount;
         }
-        if (stats.sumDocFreq == -1) {
+        if (other.sumDocFreq == -1) {
             this.sumDocFreq = -1;
         } else if (this.sumDocFreq != -1) {
-            this.sumDocFreq += stats.sumDocFreq;
+            this.sumDocFreq += other.sumDocFreq;
         }
-        if (stats.sumTotalTermFreq == -1) {
+        if (other.sumTotalTermFreq == -1) {
             this.sumTotalTermFreq = -1;
         } else if (this.sumTotalTermFreq != -1) {
-            this.sumTotalTermFreq += stats.sumTotalTermFreq;
+            this.sumTotalTermFreq += other.sumTotalTermFreq;
+        }
+
+        isSearchable |= other.isSearchable;
+        isAggregatable |= other.isAggregatable;
+
+        assert type == other.getType();
+        updateMinMax((T) other.minValue, (T) other.maxValue);
+    }
+
+    private void updateMinMax(T min, T max) {
+        if (minValue == null) {
+            minValue = min;
+        } else if (min != null && compare(minValue, min) > 0) {
+            minValue = min;
+        }
+        if (maxValue == null) {
+            maxValue = max;
+        } else if (max != null && compare(maxValue, max) < 0) {
+            maxValue = max;
         }
     }
 
-    protected abstract int compare(T a, T b);
+    protected abstract int compare(T o1, T o2);
 
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Fields.MAX_DOC, maxDoc);
+        builder.field(Fields.DOC_COUNT, docCount);
+        builder.field(Fields.DENSITY, getDensity());
+        builder.field(Fields.SUM_DOC_FREQ, sumDocFreq);
+        builder.field(Fields.SUM_TOTAL_TERM_FREQ, sumTotalTermFreq);
+        builder.field(Fields.SEARCHABLE, isSearchable);
+        builder.field(Fields.AGGREGATABLE, isAggregatable);
+        toInnerXContent(builder);
+        builder.endObject();
+        return builder;
+    }
+
+    protected void toInnerXContent(XContentBuilder builder) throws IOException {
+        builder.field(Fields.MIN_VALUE, getMinValue());
+        builder.field(Fields.MIN_VALUE_AS_STRING, getMinValueAsString());
+        builder.field(Fields.MAX_VALUE, getMaxValue());
+        builder.field(Fields.MAX_VALUE_AS_STRING, getMaxValueAsString());
+    }
+
+    @Override
+    public final void writeTo(StreamOutput out) throws IOException {
+        out.writeByte(type);
+        out.writeLong(maxDoc);
+        out.writeLong(docCount);
+        out.writeLong(sumDocFreq);
+        out.writeLong(sumTotalTermFreq);
+        out.writeBoolean(isSearchable);
+        out.writeBoolean(isAggregatable);
+        boolean hasMinMax = minValue != null;
+        out.writeBoolean(hasMinMax);
+        if (hasMinMax) {
+            writeMinMax(out);
+        }
+    }
+
+    protected abstract void writeMinMax(StreamOutput out) throws IOException;
 
     /**
-     * @return <code>true</code> if this instance matches with the provided index constraint, otherwise <code>false</code> is returned
+     * @return <code>true</code> if this instance matches with the provided index constraint,
+     * otherwise <code>false</code> is returned
      */
     public boolean match(IndexConstraint constraint) {
+        if (minValue == null) {
+            return false;
+        }
         int cmp;
         T value = valueOf(constraint.getValue(), constraint.getOptionalFormat());
         if (constraint.getProperty() == IndexConstraint.Property.MIN) {
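
For orientation, the writeTo method added above fixes a single wire layout for every FieldStats subclass: the type byte, the four long counters, the two boolean flags, and an optional, type-specific min/max pair guarded by a hasMinMax boolean; the readFrom factory further down in this diff reads the values back in the same order. Below is a rough sketch of that read order only, using plain java.io.DataInput instead of Elasticsearch's StreamInput, with the type constants taken from the subclasses introduced in this commit.

import java.io.DataInput;
import java.io.IOException;

// Sketch only: mirrors the order of FieldStats.writeTo(StreamOutput) shown above.
final class FieldStatsWireSketch {
    static void readCommonHeader(DataInput in) throws IOException {
        byte type = in.readByte();              // 0=Long, 1=Double, 2=Date, 3=Text, 4=Ip in this commit
        long maxDoc = in.readLong();
        long docCount = in.readLong();
        long sumDocFreq = in.readLong();
        long sumTotalTermFreq = in.readLong();
        boolean isSearchable = in.readBoolean();
        boolean isAggregatable = in.readBoolean();
        boolean hasMinMax = in.readBoolean();
        if (hasMinMax) {
            // the concrete subclass then reads its own min/max encoding (the counterpart of writeMinMax)
        }
    }
}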
@ -203,202 +289,179 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
public static class Long extends FieldStats<java.lang.Long> {
|
||||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
builder.startObject();
|
boolean isSearchable, boolean isAggregatable,
|
||||||
builder.field(Fields.MAX_DOC, maxDoc);
|
long minValue, long maxValue) {
|
||||||
builder.field(Fields.DOC_COUNT, docCount);
|
super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
builder.field(Fields.DENSITY, getDensity());
|
isSearchable, isAggregatable, minValue, maxValue);
|
||||||
builder.field(Fields.SUM_DOC_FREQ, sumDocFreq);
|
|
||||||
builder.field(Fields.SUM_TOTAL_TERM_FREQ, sumTotalTermFreq);
|
|
||||||
toInnerXContent(builder);
|
|
||||||
builder.endObject();
|
|
||||||
return builder;
|
|
||||||
}
|
|
||||||
|
|
||||||
protected void toInnerXContent(XContentBuilder builder) throws IOException {
|
|
||||||
builder.field(Fields.MIN_VALUE, getMinValue());
|
|
||||||
builder.field(Fields.MIN_VALUE_AS_STRING, getMinValueAsString());
|
|
||||||
builder.field(Fields.MAX_VALUE, getMaxValue());
|
|
||||||
builder.field(Fields.MAX_VALUE_AS_STRING, getMaxValueAsString());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
maxDoc = in.readVLong();
|
|
||||||
docCount = in.readLong();
|
|
||||||
sumDocFreq = in.readLong();
|
|
||||||
sumTotalTermFreq = in.readLong();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
out.writeByte(type);
|
|
||||||
out.writeVLong(maxDoc);
|
|
||||||
out.writeLong(docCount);
|
|
||||||
out.writeLong(sumDocFreq);
|
|
||||||
out.writeLong(sumTotalTermFreq);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static abstract class ComparableFieldStats<T extends Comparable<? super T>> extends FieldStats<T> {
|
|
||||||
protected ComparableFieldStats(int type) {
|
|
||||||
super(type);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected ComparableFieldStats(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq) {
|
public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
super(type, maxDoc, docCount, sumDocFreq, sumTotalTermFreq);
|
boolean isSearchable, boolean isAggregatable) {
|
||||||
|
super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, null, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Long(long maxDoc,
|
||||||
|
boolean isSearchable, boolean isAggregatable) {
|
||||||
|
super((byte) 0, maxDoc, isSearchable, isAggregatable);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected int compare(T a, T b) {
|
public int compare(java.lang.Long o1, java.lang.Long o2) {
|
||||||
return a.compareTo(b);
|
return o1.compareTo(o2);
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class Long extends ComparableFieldStats<java.lang.Long> {
|
|
||||||
|
|
||||||
public Long() {
|
|
||||||
super(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue) {
|
|
||||||
this(0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, minValue, maxValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected Long(int type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue) {
|
|
||||||
super(type, maxDoc, docCount, sumDocFreq, sumTotalTermFreq);
|
|
||||||
this.minValue = minValue;
|
|
||||||
this.maxValue = maxValue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMinValueAsString() {
|
public void writeMinMax(StreamOutput out) throws IOException {
|
||||||
return String.valueOf(minValue.longValue());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getMaxValueAsString() {
|
|
||||||
return String.valueOf(maxValue.longValue());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void append(FieldStats stats) {
|
|
||||||
super.append(stats);
|
|
||||||
Long other = (Long) stats;
|
|
||||||
this.minValue = Math.min(other.minValue, minValue);
|
|
||||||
this.maxValue = Math.max(other.maxValue, maxValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected java.lang.Long valueOf(String value, String optionalFormat) {
|
|
||||||
if (optionalFormat != null) {
|
|
||||||
throw new UnsupportedOperationException("custom format isn't supported");
|
|
||||||
}
|
|
||||||
return java.lang.Long.valueOf(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
super.readFrom(in);
|
|
||||||
minValue = in.readLong();
|
|
||||||
maxValue = in.readLong();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
super.writeTo(out);
|
|
||||||
out.writeLong(minValue);
|
out.writeLong(minValue);
|
||||||
out.writeLong(maxValue);
|
out.writeLong(maxValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
@Override
|
||||||
|
public java.lang.Long valueOf(String value, String optionalFormat) {
|
||||||
public static final class Double extends ComparableFieldStats<java.lang.Double> {
|
return java.lang.Long.parseLong(value);
|
||||||
|
|
||||||
public Double() {
|
|
||||||
super(2);
|
|
||||||
}
|
|
||||||
|
|
||||||
public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, double minValue, double maxValue) {
|
|
||||||
super(2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq);
|
|
||||||
this.minValue = minValue;
|
|
||||||
this.maxValue = maxValue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMinValueAsString() {
|
public String getMinValueAsString() {
|
||||||
return String.valueOf(minValue.doubleValue());
|
return minValue != null ? java.lang.Long.toString(minValue) : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMaxValueAsString() {
|
public String getMaxValueAsString() {
|
||||||
return String.valueOf(maxValue.doubleValue());
|
return maxValue != null ? java.lang.Long.toString(maxValue) : null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Double extends FieldStats<java.lang.Double> {
|
||||||
|
public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
|
boolean isSearchable, boolean isAggregatable,
|
||||||
|
double minValue, double maxValue) {
|
||||||
|
super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
|
||||||
|
minValue, maxValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
|
boolean isSearchable, boolean isAggregatable) {
|
||||||
|
super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable, null, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Double(long maxDoc, boolean isSearchable, boolean isAggregatable) {
|
||||||
|
super((byte) 1, maxDoc, isSearchable, isAggregatable);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void append(FieldStats stats) {
|
public int compare(java.lang.Double o1, java.lang.Double o2) {
|
||||||
super.append(stats);
|
return o1.compareTo(o2);
|
||||||
Double other = (Double) stats;
|
|
||||||
this.minValue = Math.min(other.minValue, minValue);
|
|
||||||
this.maxValue = Math.max(other.maxValue, maxValue);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected java.lang.Double valueOf(String value, String optionalFormat) {
|
public void writeMinMax(StreamOutput out) throws IOException {
|
||||||
if (optionalFormat != null) {
|
|
||||||
throw new UnsupportedOperationException("custom format isn't supported");
|
|
||||||
}
|
|
||||||
return java.lang.Double.valueOf(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
super.readFrom(in);
|
|
||||||
minValue = in.readDouble();
|
|
||||||
maxValue = in.readDouble();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
super.writeTo(out);
|
|
||||||
out.writeDouble(minValue);
|
out.writeDouble(minValue);
|
||||||
out.writeDouble(maxValue);
|
out.writeDouble(maxValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
@Override
|
||||||
|
public java.lang.Double valueOf(String value, String optionalFormat) {
|
||||||
public static final class Text extends ComparableFieldStats<BytesRef> {
|
if (optionalFormat != null) {
|
||||||
|
throw new UnsupportedOperationException("custom format isn't supported");
|
||||||
public Text() {
|
}
|
||||||
super(3);
|
return java.lang.Double.parseDouble(value);
|
||||||
}
|
|
||||||
|
|
||||||
public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, BytesRef minValue, BytesRef maxValue) {
|
|
||||||
super(3, maxDoc, docCount, sumDocFreq, sumTotalTermFreq);
|
|
||||||
this.minValue = minValue;
|
|
||||||
this.maxValue = maxValue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMinValueAsString() {
|
public String getMinValueAsString() {
|
||||||
return minValue.utf8ToString();
|
return minValue != null ? java.lang.Double.toString(minValue) : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMaxValueAsString() {
|
public String getMaxValueAsString() {
|
||||||
return maxValue.utf8ToString();
|
return maxValue != null ? java.lang.Double.toString(maxValue) : null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Date extends FieldStats<java.lang.Long> {
|
||||||
|
private FormatDateTimeFormatter formatter;
|
||||||
|
|
||||||
|
public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
|
boolean isSearchable, boolean isAggregatable,
|
||||||
|
FormatDateTimeFormatter formatter,
|
||||||
|
long minValue, long maxValue) {
|
||||||
|
super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
|
||||||
|
minValue, maxValue);
|
||||||
|
this.formatter = formatter;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
|
boolean isSearchable, boolean isAggregatable,
|
||||||
|
FormatDateTimeFormatter formatter) {
|
||||||
|
super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
|
||||||
|
null, null);
|
||||||
|
this.formatter = formatter;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date(long maxDoc, boolean isSearchable, boolean isAggregatable,
|
||||||
|
FormatDateTimeFormatter formatter) {
|
||||||
|
super((byte) 2, maxDoc, isSearchable, isAggregatable);
|
||||||
|
this.formatter = formatter;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void append(FieldStats stats) {
|
public int compare(java.lang.Long o1, java.lang.Long o2) {
|
||||||
super.append(stats);
|
return o1.compareTo(o2);
|
||||||
Text other = (Text) stats;
|
}
|
||||||
if (other.minValue.compareTo(minValue) < 0) {
|
|
||||||
minValue = other.minValue;
|
@Override
|
||||||
}
|
public void writeMinMax(StreamOutput out) throws IOException {
|
||||||
if (other.maxValue.compareTo(maxValue) > 0) {
|
out.writeString(formatter.format());
|
||||||
maxValue = other.maxValue;
|
out.writeLong(minValue);
|
||||||
|
out.writeLong(maxValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public java.lang.Long valueOf(String value, String fmt) {
|
||||||
|
FormatDateTimeFormatter f = formatter;
|
||||||
|
if (fmt != null) {
|
||||||
|
f = Joda.forPattern(fmt);
|
||||||
}
|
}
|
||||||
|
return f.parser().parseDateTime(value).getMillis();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getMinValueAsString() {
|
||||||
|
return minValue != null ? formatter.printer().print(minValue) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getMaxValueAsString() {
|
||||||
|
return maxValue != null ? formatter.printer().print(maxValue) : null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class Text extends FieldStats<BytesRef> {
|
||||||
|
public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
|
boolean isSearchable, boolean isAggregatable,
|
||||||
|
BytesRef minValue, BytesRef maxValue) {
|
||||||
|
super((byte) 3, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable,
|
||||||
|
minValue, maxValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Text(long maxDoc, boolean isSearchable, boolean isAggregatable) {
|
||||||
|
super((byte) 3, maxDoc, isSearchable, isAggregatable);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int compare(BytesRef o1, BytesRef o2) {
|
||||||
|
return o1.compareTo(o2);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void writeMinMax(StreamOutput out) throws IOException {
|
||||||
|
out.writeBytesRef(minValue);
|
||||||
|
out.writeBytesRef(maxValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -409,167 +472,160 @@ public abstract class FieldStats<T> implements Streamable, ToXContent {
|
||||||
return new BytesRef(value);
|
return new BytesRef(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getMinValueAsString() {
|
||||||
|
return minValue != null ? minValue.utf8ToString() : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getMaxValueAsString() {
|
||||||
|
return maxValue != null ? maxValue.utf8ToString() : null;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void toInnerXContent(XContentBuilder builder) throws IOException {
|
protected void toInnerXContent(XContentBuilder builder) throws IOException {
|
||||||
builder.field(Fields.MIN_VALUE, getMinValueAsString());
|
builder.field(Fields.MIN_VALUE, getMinValueAsString());
|
||||||
builder.field(Fields.MAX_VALUE, getMaxValueAsString());
|
builder.field(Fields.MAX_VALUE, getMaxValueAsString());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
super.readFrom(in);
|
|
||||||
minValue = in.readBytesRef();
|
|
||||||
maxValue = in.readBytesRef();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
super.writeTo(out);
|
|
||||||
out.writeBytesRef(minValue);
|
|
||||||
out.writeBytesRef(maxValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
public static final class Date extends Long {
|
|
||||||
|
|
||||||
private FormatDateTimeFormatter dateFormatter;
|
|
||||||
|
|
||||||
public Date() {
|
|
||||||
}
|
|
||||||
|
|
||||||
public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq, long minValue, long maxValue, FormatDateTimeFormatter dateFormatter) {
|
|
||||||
super(1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, minValue, maxValue);
|
|
||||||
this.dateFormatter = dateFormatter;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getMinValueAsString() {
|
|
||||||
return dateFormatter.printer().print(minValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getMaxValueAsString() {
|
|
||||||
return dateFormatter.printer().print(maxValue);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected java.lang.Long valueOf(String value, String optionalFormat) {
|
|
||||||
FormatDateTimeFormatter dateFormatter = this.dateFormatter;
|
|
||||||
if (optionalFormat != null) {
|
|
||||||
dateFormatter = Joda.forPattern(optionalFormat);
|
|
||||||
}
|
|
||||||
return dateFormatter.parser().parseMillis(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
super.readFrom(in);
|
|
||||||
dateFormatter = Joda.forPattern(in.readString());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
super.writeTo(out);
|
|
||||||
out.writeString(dateFormatter.format());
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Ip extends FieldStats<InetAddress> {
|
public static class Ip extends FieldStats<InetAddress> {
|
||||||
|
public Ip(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
|
||||||
private InetAddress minValue, maxValue;
|
boolean isSearchable, boolean isAggregatable,
|
||||||
|
InetAddress minValue, InetAddress maxValue) {
|
||||||
public Ip(int maxDoc, int docCount, long sumDocFreq, long sumTotalTermFreq,
|
super((byte) 4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
InetAddress minValue, InetAddress maxValue) {
|
isSearchable, isAggregatable,
|
||||||
super(4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq);
|
minValue, maxValue);
|
||||||
this.minValue = minValue;
|
|
||||||
this.maxValue = maxValue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public Ip() {
|
public Ip(long maxDoc, boolean isSearchable, boolean isAggregatable) {
|
||||||
super(4);
|
super((byte) 4, maxDoc, isSearchable, isAggregatable);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int compare(InetAddress o1, InetAddress o2) {
|
||||||
|
byte[] b1 = InetAddressPoint.encode(o1);
|
||||||
|
byte[] b2 = InetAddressPoint.encode(o2);
|
||||||
|
return StringHelper.compare(b1.length, b1, 0, b2, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void writeMinMax(StreamOutput out) throws IOException {
|
||||||
|
byte[] b1 = InetAddressPoint.encode(minValue);
|
||||||
|
byte[] b2 = InetAddressPoint.encode(maxValue);
|
||||||
|
out.writeByte((byte) b1.length);
|
||||||
|
out.writeBytes(b1);
|
||||||
|
out.writeByte((byte) b2.length);
|
||||||
|
out.writeBytes(b2);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public InetAddress valueOf(String value, String fmt) {
|
||||||
|
return InetAddresses.forString(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMinValueAsString() {
|
public String getMinValueAsString() {
|
||||||
return NetworkAddress.format(minValue);
|
return minValue != null ? NetworkAddress.format(minValue) : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getMaxValueAsString() {
|
public String getMaxValueAsString() {
|
||||||
return NetworkAddress.format(maxValue);
|
return maxValue != null ? NetworkAddress.format(maxValue) : null;
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected InetAddress valueOf(String value, String optionalFormat) {
|
|
||||||
try {
|
|
||||||
return InetAddress.getByName(value);
|
|
||||||
} catch (UnknownHostException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
protected int compare(InetAddress a, InetAddress b) {
|
|
||||||
byte[] ab = InetAddressPoint.encode(a);
|
|
||||||
byte[] bb = InetAddressPoint.encode(b);
|
|
||||||
return StringHelper.compare(ab.length, ab, 0, bb, 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
|
||||||
super.readFrom(in);
|
|
||||||
minValue = valueOf(in.readString(), null);
|
|
||||||
maxValue = valueOf(in.readString(), null);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void writeTo(StreamOutput out) throws IOException {
|
|
||||||
super.writeTo(out);
|
|
||||||
out.writeString(NetworkAddress.format(minValue));
|
|
||||||
out.writeString(NetworkAddress.format(maxValue));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static FieldStats read(StreamInput in) throws IOException {
|
public static FieldStats readFrom(StreamInput in) throws IOException {
|
||||||
FieldStats stats;
|
|
||||||
byte type = in.readByte();
|
byte type = in.readByte();
|
||||||
|
long maxDoc = in.readLong();
|
||||||
|
long docCount = in.readLong();
|
||||||
|
long sumDocFreq = in.readLong();
|
||||||
|
long sumTotalTermFreq = in.readLong();
|
||||||
|
boolean isSearchable = in.readBoolean();
|
||||||
|
boolean isAggregatable = in.readBoolean();
|
||||||
|
boolean hasMinMax = in.readBoolean();
|
||||||
|
|
||||||
switch (type) {
|
switch (type) {
|
||||||
case 0:
|
case 0:
|
||||||
stats = new Long();
|
if (hasMinMax) {
|
||||||
break;
|
return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, in.readLong(), in.readLong());
|
||||||
|
}
|
||||||
|
return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable);
|
||||||
|
|
||||||
case 1:
|
case 1:
|
||||||
stats = new Date();
|
if (hasMinMax) {
|
||||||
break;
|
return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, in.readDouble(), in.readDouble());
|
||||||
|
}
|
||||||
|
return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable);
|
||||||
|
|
||||||
case 2:
|
case 2:
|
||||||
stats = new Double();
|
FormatDateTimeFormatter formatter = Joda.forPattern(in.readString());
|
||||||
break;
|
if (hasMinMax) {
|
||||||
|
return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, formatter, in.readLong(), in.readLong());
|
||||||
|
}
|
||||||
|
return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, formatter);
|
||||||
|
|
||||||
case 3:
|
case 3:
|
||||||
stats = new Text();
|
if (hasMinMax) {
|
||||||
break;
|
return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, in.readBytesRef(), in.readBytesRef());
|
||||||
|
}
|
||||||
|
return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, null, null);
|
||||||
|
|
||||||
case 4:
|
case 4:
|
||||||
stats = new Ip();
|
InetAddress min = null;
|
||||||
break;
|
InetAddress max = null;
|
||||||
|
if (hasMinMax) {
|
||||||
|
int l1 = in.readByte();
|
||||||
|
byte[] b1 = new byte[l1];
|
||||||
|
int l2 = in.readByte();
|
||||||
|
byte[] b2 = new byte[l2];
|
||||||
|
min = InetAddressPoint.decode(b1);
|
||||||
|
max = InetAddressPoint.decode(b2);
|
||||||
|
}
|
||||||
|
return new Ip(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
|
||||||
|
isSearchable, isAggregatable, min, max);
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw new IllegalArgumentException("Illegal type [" + type + "]");
|
throw new IllegalArgumentException("Unknown type.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String typeName(byte type) {
|
||||||
|
switch (type) {
|
||||||
|
case 0:
|
||||||
|
return "whole-number";
|
||||||
|
case 1:
|
||||||
|
return "floating-point";
|
||||||
|
case 2:
|
||||||
|
return "date";
|
||||||
|
case 3:
|
||||||
|
return "text";
|
||||||
|
case 4:
|
||||||
|
return "ip";
|
||||||
|
default:
|
||||||
|
throw new IllegalArgumentException("Unknown type.");
|
||||||
}
|
}
|
||||||
stats.readFrom(in);
|
|
||||||
return stats;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private final static class Fields {
|
private final static class Fields {
|
||||||
|
|
||||||
final static String MAX_DOC = new String("max_doc");
|
final static String MAX_DOC = new String("max_doc");
|
||||||
final static String DOC_COUNT = new String("doc_count");
|
final static String DOC_COUNT = new String("doc_count");
|
||||||
final static String DENSITY = new String("density");
|
final static String DENSITY = new String("density");
|
||||||
final static String SUM_DOC_FREQ = new String("sum_doc_freq");
|
final static String SUM_DOC_FREQ = new String("sum_doc_freq");
|
||||||
final static String SUM_TOTAL_TERM_FREQ = new String("sum_total_term_freq");
|
final static String SUM_TOTAL_TERM_FREQ = new String("sum_total_term_freq");
|
||||||
|
final static String SEARCHABLE = new String("searchable");
|
||||||
|
final static String AGGREGATABLE = new String("aggregatable");
|
||||||
final static String MIN_VALUE = new String("min_value");
|
final static String MIN_VALUE = new String("min_value");
|
||||||
final static String MIN_VALUE_AS_STRING = new String("min_value_as_string");
|
final static String MIN_VALUE_AS_STRING = new String("min_value_as_string");
|
||||||
final static String MAX_VALUE = new String("max_value");
|
final static String MAX_VALUE = new String("max_value");
|
||||||
final static String MAX_VALUE_AS_STRING = new String("max_value_as_string");
|
final static String MAX_VALUE_AS_STRING = new String("max_value_as_string");
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -108,7 +108,8 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
|
||||||
this.indexConstraints = indexConstraints.toArray(new IndexConstraint[indexConstraints.size()]);
|
this.indexConstraints = indexConstraints.toArray(new IndexConstraint[indexConstraints.size()]);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void parseIndexContraints(List<IndexConstraint> indexConstraints, XContentParser parser) throws IOException {
|
private void parseIndexContraints(List<IndexConstraint> indexConstraints,
|
||||||
|
XContentParser parser) throws IOException {
|
||||||
Token token = parser.currentToken();
|
Token token = parser.currentToken();
|
||||||
assert token == Token.START_OBJECT;
|
assert token == Token.START_OBJECT;
|
||||||
String field = null;
|
String field = null;
|
||||||
|
@ -117,7 +118,8 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
|
||||||
if (token == Token.FIELD_NAME) {
|
if (token == Token.FIELD_NAME) {
|
||||||
field = currentName = parser.currentName();
|
field = currentName = parser.currentName();
|
||||||
} else if (token == Token.START_OBJECT) {
|
} else if (token == Token.START_OBJECT) {
|
||||||
for (Token fieldToken = parser.nextToken(); fieldToken != Token.END_OBJECT; fieldToken = parser.nextToken()) {
|
for (Token fieldToken = parser.nextToken();
|
||||||
|
fieldToken != Token.END_OBJECT; fieldToken = parser.nextToken()) {
|
||||||
if (fieldToken == Token.FIELD_NAME) {
|
if (fieldToken == Token.FIELD_NAME) {
|
||||||
currentName = parser.currentName();
|
currentName = parser.currentName();
|
||||||
} else if (fieldToken == Token.START_OBJECT) {
|
} else if (fieldToken == Token.START_OBJECT) {
|
||||||
|
@ -125,7 +127,8 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
|
||||||
String value = null;
|
String value = null;
|
||||||
String optionalFormat = null;
|
String optionalFormat = null;
|
||||||
IndexConstraint.Comparison comparison = null;
|
IndexConstraint.Comparison comparison = null;
|
||||||
for (Token propertyToken = parser.nextToken(); propertyToken != Token.END_OBJECT; propertyToken = parser.nextToken()) {
|
for (Token propertyToken = parser.nextToken();
|
||||||
|
propertyToken != Token.END_OBJECT; propertyToken = parser.nextToken()) {
|
||||||
if (propertyToken.isValue()) {
|
if (propertyToken.isValue()) {
|
||||||
if ("format".equals(parser.currentName())) {
|
if ("format".equals(parser.currentName())) {
|
||||||
optionalFormat = parser.text();
|
optionalFormat = parser.text();
|
||||||
|
@ -162,7 +165,8 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
|
||||||
public ActionRequestValidationException validate() {
|
public ActionRequestValidationException validate() {
|
||||||
ActionRequestValidationException validationException = super.validate();
|
ActionRequestValidationException validationException = super.validate();
|
||||||
if ("cluster".equals(level) == false && "indices".equals(level) == false) {
|
if ("cluster".equals(level) == false && "indices".equals(level) == false) {
|
||||||
validationException = ValidateActions.addValidationError("invalid level option [" + level + "]", validationException);
|
validationException =
|
||||||
|
ValidateActions.addValidationError("invalid level option [" + level + "]", validationException);
|
||||||
}
|
}
|
||||||
if (fields == null || fields.length == 0) {
|
if (fields == null || fields.length == 0) {
|
||||||
validationException = ValidateActions.addValidationError("no fields specified", validationException);
|
validationException = ValidateActions.addValidationError("no fields specified", validationException);
|
||||||
|
|
|
@ -24,7 +24,8 @@ import org.elasticsearch.client.ElasticsearchClient;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*/
|
*/
|
||||||
public class FieldStatsRequestBuilder extends BroadcastOperationRequestBuilder<FieldStatsRequest, FieldStatsResponse, FieldStatsRequestBuilder> {
|
public class FieldStatsRequestBuilder extends
|
||||||
|
BroadcastOperationRequestBuilder<FieldStatsRequest, FieldStatsResponse, FieldStatsRequestBuilder> {
|
||||||
|
|
||||||
public FieldStatsRequestBuilder(ElasticsearchClient client, FieldStatsAction action) {
|
public FieldStatsRequestBuilder(ElasticsearchClient client, FieldStatsAction action) {
|
||||||
super(client, action, new FieldStatsRequest());
|
super(client, action, new FieldStatsRequest());
|
||||||
|
|
|
@ -33,15 +33,19 @@ import java.util.Map;
|
||||||
/**
|
/**
|
||||||
*/
|
*/
|
||||||
public class FieldStatsResponse extends BroadcastResponse {
|
public class FieldStatsResponse extends BroadcastResponse {
|
||||||
|
|
||||||
private Map<String, Map<String, FieldStats>> indicesMergedFieldStats;
|
private Map<String, Map<String, FieldStats>> indicesMergedFieldStats;
|
||||||
|
private Map<String, String> conflicts;
|
||||||
|
|
||||||
public FieldStatsResponse() {
|
public FieldStatsResponse() {
|
||||||
}
|
}
|
||||||
|
|
||||||
public FieldStatsResponse(int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures, Map<String, Map<String, FieldStats>> indicesMergedFieldStats) {
|
public FieldStatsResponse(int totalShards, int successfulShards, int failedShards,
|
||||||
|
List<ShardOperationFailedException> shardFailures,
|
||||||
|
Map<String, Map<String, FieldStats>> indicesMergedFieldStats,
|
||||||
|
Map<String, String> conflicts) {
|
||||||
super(totalShards, successfulShards, failedShards, shardFailures);
|
super(totalShards, successfulShards, failedShards, shardFailures);
|
||||||
this.indicesMergedFieldStats = indicesMergedFieldStats;
|
this.indicesMergedFieldStats = indicesMergedFieldStats;
|
||||||
|
this.conflicts = conflicts;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nullable
|
@Nullable
|
||||||
|
@ -49,6 +53,10 @@ public class FieldStatsResponse extends BroadcastResponse {
|
||||||
return indicesMergedFieldStats.get("_all");
|
return indicesMergedFieldStats.get("_all");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public Map<String, String> getConflicts() {
|
||||||
|
return conflicts;
|
||||||
|
}
|
||||||
|
|
||||||
public Map<String, Map<String, FieldStats>> getIndicesMergedFieldStats() {
|
public Map<String, Map<String, FieldStats>> getIndicesMergedFieldStats() {
|
||||||
return indicesMergedFieldStats;
|
return indicesMergedFieldStats;
|
||||||
}
|
}
|
||||||
|
@ -56,7 +64,7 @@ public class FieldStatsResponse extends BroadcastResponse {
|
||||||
@Override
|
@Override
|
||||||
public void readFrom(StreamInput in) throws IOException {
|
public void readFrom(StreamInput in) throws IOException {
|
||||||
super.readFrom(in);
|
super.readFrom(in);
|
||||||
final int size = in.readVInt();
|
int size = in.readVInt();
|
||||||
indicesMergedFieldStats = new HashMap<>(size);
|
indicesMergedFieldStats = new HashMap<>(size);
|
||||||
for (int i = 0; i < size; i++) {
|
for (int i = 0; i < size; i++) {
|
||||||
String key = in.readString();
|
String key = in.readString();
|
||||||
|
@ -65,10 +73,18 @@ public class FieldStatsResponse extends BroadcastResponse {
|
||||||
indicesMergedFieldStats.put(key, indexFieldStats);
|
indicesMergedFieldStats.put(key, indexFieldStats);
|
||||||
for (int j = 0; j < indexSize; j++) {
|
for (int j = 0; j < indexSize; j++) {
|
||||||
key = in.readString();
|
key = in.readString();
|
||||||
FieldStats value = FieldStats.read(in);
|
FieldStats value = FieldStats.readFrom(in);
|
||||||
indexFieldStats.put(key, value);
|
indexFieldStats.put(key, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
size = in.readVInt();
|
||||||
|
conflicts = new HashMap<>(size);
|
||||||
|
for (int i = 0; i < size; i++) {
|
||||||
|
String key = in.readString();
|
||||||
|
String value = in.readString();
|
||||||
|
conflicts.put(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -83,5 +99,10 @@ public class FieldStatsResponse extends BroadcastResponse {
|
||||||
entry2.getValue().writeTo(out);
|
entry2.getValue().writeTo(out);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
out.writeVInt(conflicts.size());
|
||||||
|
for (Map.Entry<String, String> entry : conflicts.entrySet()) {
|
||||||
|
out.writeString(entry.getKey());
|
||||||
|
out.writeString(entry.getValue());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -54,7 +54,7 @@ public class FieldStatsShardResponse extends BroadcastShardResponse {
|
||||||
fieldStats = new HashMap<>(size);
|
fieldStats = new HashMap<>(size);
|
||||||
for (int i = 0; i < size; i++) {
|
for (int i = 0; i < size; i++) {
|
||||||
String key = in.readString();
|
String key = in.readString();
|
||||||
FieldStats value = FieldStats.read(in);
|
FieldStats value = FieldStats.readFrom(in);
|
||||||
fieldStats.put(key, value);
|
fieldStats.put(key, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -50,7 +50,8 @@ public class IndexConstraint {
|
||||||
this(field, property, comparison, value, null);
|
this(field, property, comparison, value, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
public IndexConstraint(String field, Property property, Comparison comparison, String value, String optionalFormat) {
|
public IndexConstraint(String field, Property property,
|
||||||
|
Comparison comparison, String value, String optionalFormat) {
|
||||||
this.field = Objects.requireNonNull(field);
|
this.field = Objects.requireNonNull(field);
|
||||||
this.property = Objects.requireNonNull(property);
|
this.property = Objects.requireNonNull(property);
|
||||||
this.comparison = Objects.requireNonNull(comparison);
|
this.comparison = Objects.requireNonNull(comparison);
|
||||||
|
|
|
@@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
@@ -45,32 +46,41 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
 import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Iterator;
 import java.util.Set;
+import java.util.HashSet;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.concurrent.atomic.AtomicReferenceArray;
 
-public class TransportFieldStatsTransportAction extends TransportBroadcastAction<FieldStatsRequest, FieldStatsResponse, FieldStatsShardRequest, FieldStatsShardResponse> {
+public class TransportFieldStatsTransportAction extends
+    TransportBroadcastAction<FieldStatsRequest, FieldStatsResponse, FieldStatsShardRequest, FieldStatsShardResponse> {
 
     private final IndicesService indicesService;
 
     @Inject
     public TransportFieldStatsTransportAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                               TransportService transportService, ActionFilters actionFilters,
-                                              IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService) {
-        super(settings, FieldStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, FieldStatsRequest::new, FieldStatsShardRequest::new, ThreadPool.Names.MANAGEMENT);
+                                              IndexNameExpressionResolver indexNameExpressionResolver,
+                                              IndicesService indicesService) {
+        super(settings, FieldStatsAction.NAME, threadPool, clusterService, transportService,
+            actionFilters, indexNameExpressionResolver, FieldStatsRequest::new,
+            FieldStatsShardRequest::new, ThreadPool.Names.MANAGEMENT);
        this.indicesService = indicesService;
     }
 
     @Override
-    protected FieldStatsResponse newResponse(FieldStatsRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
+    protected FieldStatsResponse newResponse(FieldStatsRequest request, AtomicReferenceArray shardsResponses,
+                                             ClusterState clusterState) {
         int successfulShards = 0;
         int failedShards = 0;
+        Map<String, String> conflicts = new HashMap<>();
         Map<String, Map<String, FieldStats>> indicesMergedFieldStats = new HashMap<>();
         List<ShardOperationFailedException> shardFailures = new ArrayList<>();
         for (int i = 0; i < shardsResponses.length(); i++) {
@@ -79,7 +89,9 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
                 // simply ignore non active shards
             } else if (shardValue instanceof BroadcastShardOperationFailedException) {
                 failedShards++;
-                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardValue));
+                shardFailures.add(
+                    new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardValue)
+                );
             } else {
                 successfulShards++;
                 FieldStatsShardResponse shardResponse = (FieldStatsShardResponse) shardValue;
@@ -104,40 +116,63 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
                     FieldStats<?> existing = indexMergedFieldStats.get(entry.getKey());
                     if (existing != null) {
                         if (existing.getType() != entry.getValue().getType()) {
-                            throw new IllegalStateException(
-                                "trying to merge the field stats of field [" + entry.getKey() + "] from index [" + shardResponse.getIndex() + "] but the field type is incompatible, try to set the 'level' option to 'indices'"
-                            );
+                            if (conflicts.containsKey(entry.getKey()) == false) {
+                                conflicts.put(entry.getKey(),
+                                    "Field [" + entry.getKey() + "] of type [" +
+                                        FieldStats.typeName(entry.getValue().getType()) +
+                                        "] conflicts with existing field of type [" +
+                                        FieldStats.typeName(existing.getType()) +
+                                        "] in other index.");
+                            }
+                        } else {
+                            existing.accumulate(entry.getValue());
                         }
-                        existing.append(entry.getValue());
                     } else {
                         indexMergedFieldStats.put(entry.getKey(), entry.getValue());
                     }
                 }
             }
 
+            // Check the field with conflicts and remove them.
+            for (String conflictKey : conflicts.keySet()) {
+                Iterator<Map.Entry<String, Map<String, FieldStats>>> iterator =
+                    indicesMergedFieldStats.entrySet().iterator();
+                while (iterator.hasNext()) {
+                    Map.Entry<String, Map<String, FieldStats>> entry = iterator.next();
+                    if (entry.getValue().containsKey(conflictKey)) {
+                        entry.getValue().remove(conflictKey);
+                    }
+                }
+            }
 
         }
 
         if (request.getIndexConstraints().length != 0) {
             Set<String> fieldStatFields = new HashSet<>(Arrays.asList(request.getFields()));
             for (IndexConstraint indexConstraint : request.getIndexConstraints()) {
-                Iterator<Map.Entry<String, Map<String, FieldStats>>> iterator = indicesMergedFieldStats.entrySet().iterator();
+                Iterator<Map.Entry<String, Map<String, FieldStats>>> iterator =
+                    indicesMergedFieldStats.entrySet().iterator();
                 while (iterator.hasNext()) {
                     Map.Entry<String, Map<String, FieldStats>> entry = iterator.next();
                     FieldStats indexConstraintFieldStats = entry.getValue().get(indexConstraint.getField());
                     if (indexConstraintFieldStats != null && indexConstraintFieldStats.match(indexConstraint)) {
-                        // If the field stats didn't occur in the list of fields in the original request we need to remove the
-                        // field stats, because it was never requested and was only needed to validate the index constraint
+                        // If the field stats didn't occur in the list of fields in the original request
+                        // we need to remove the field stats, because it was never requested and was only needed to
+                        // validate the index constraint.
                         if (fieldStatFields.contains(indexConstraint.getField()) == false) {
                             entry.getValue().remove(indexConstraint.getField());
                         }
                     } else {
-                        // The index constraint didn't match or was empty, so we remove all the field stats of the index we're checking
+                        // The index constraint didn't match or was empty,
+                        // so we remove all the field stats of the index we're checking.
                         iterator.remove();
                     }
                 }
             }
         }
 
-        return new FieldStatsResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures, indicesMergedFieldStats);
+        return new FieldStatsResponse(shardsResponses.length(), successfulShards, failedShards,
+            shardFailures, indicesMergedFieldStats, conflicts);
     }
 
     @Override
@@ -159,13 +194,22 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
         IndexShard shard = indexServices.getShard(shardId.id());
         try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) {
             for (String field : request.getFields()) {
-                MappedFieldType fieldType = mapperService.fullName(field);
-                if (fieldType == null) {
-                    throw new IllegalArgumentException("field [" + field + "] doesn't exist");
+                Collection<String> matchFields;
+                if (Regex.isSimpleMatchPattern(field)) {
+                    matchFields = mapperService.simpleMatchToIndexNames(field);
+                } else {
+                    matchFields = Collections.singleton(field);
                 }
-                FieldStats<?> stats = fieldType.stats(searcher.reader());
-                if (stats != null) {
-                    fieldStats.put(field, stats);
+                for (String matchField : matchFields) {
+                    MappedFieldType fieldType = mapperService.fullName(matchField);
+                    if (fieldType == null) {
+                        // ignore.
+                        continue;
+                    }
+                    FieldStats<?> stats = fieldType.stats(searcher.reader());
+                    if (stats != null) {
+                        fieldStats.put(matchField, stats);
+                    }
                 }
             }
         } catch (IOException e) {
@@ -175,7 +219,8 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
     }
 
     @Override
-    protected GroupShardsIterator shards(ClusterState clusterState, FieldStatsRequest request, String[] concreteIndices) {
+    protected GroupShardsIterator shards(ClusterState clusterState, FieldStatsRequest request,
+                                         String[] concreteIndices) {
         return clusterService.operationRouting().searchShards(clusterState, concreteIndices, null, null);
     }
 
@@ -185,7 +230,8 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
     }
 
     @Override
-    protected ClusterBlockException checkRequestBlock(ClusterState state, FieldStatsRequest request, String[] concreteIndices) {
+    protected ClusterBlockException checkRequestBlock(ClusterState state, FieldStatsRequest request,
+                                                      String[] concreteIndices) {
         return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
     }
 }

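For readers skimming the hunk above: instead of failing on the first type mismatch, the merge now records one conflict message per field and later strips that field from every index's merged stats. Below is a minimal, self-contained sketch of that bookkeeping pattern; `StatsStub`, its `type` byte and `accumulate` method are illustrative stand-ins, not the real `FieldStats` API.

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative stand-in for a per-field stats object; not the real FieldStats class.
final class StatsStub {
    final byte type;   // numeric type id, as returned by FieldStats#getType()
    long docCount;     // docs that carry the field

    StatsStub(byte type, long docCount) {
        this.type = type;
        this.docCount = docCount;
    }

    // Roughly what accumulate(..) does above: fold another shard's stats into this one.
    void accumulate(StatsStub other) {
        docCount += other.docCount;
    }
}

public class ConflictMergeSketch {
    static void merge(Map<String, StatsStub> merged, Map<String, String> conflicts,
                      String field, StatsStub incoming) {
        StatsStub existing = merged.get(field);
        if (existing == null) {
            merged.put(field, incoming);
        } else if (existing.type != incoming.type) {
            // Record the first conflict per field instead of throwing an exception.
            conflicts.putIfAbsent(field, "field [" + field + "] has incompatible types across indices");
        } else {
            existing.accumulate(incoming);
        }
    }

    public static void main(String[] args) {
        Map<String, String> conflicts = new HashMap<>();
        Map<String, StatsStub> merged = new HashMap<>();

        // Two indices report the same field name with different types.
        merge(merged, conflicts, "created", new StatsStub((byte) 2, 10));
        merge(merged, conflicts, "created", new StatsStub((byte) 0, 7));

        // Conflicting fields are reported separately and dropped from the merged view.
        merged.keySet().removeAll(conflicts.keySet());
        System.out.println("conflicts=" + conflicts.keySet() + " merged=" + merged.keySet());
    }
}
```
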
@@ -96,7 +96,11 @@ public abstract class MappedFieldType extends FieldType {
     @Override
     public abstract MappedFieldType clone();
 
-    /** Return a fielddata builder for this field. */
+    /** Return a fielddata builder for this field
+     * @throws IllegalArgumentException if the fielddata is not supported on this type.
+     * An IllegalArgumentException is needed in order to return an http error 400
+     * when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status}
+     **/
     public IndexFieldData.Builder fielddataBuilder() {
         throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]");
     }
@@ -315,6 +319,25 @@ public abstract class MappedFieldType extends FieldType {
         return BytesRefs.toBytesRef(value);
     }
 
+    /** Returns true if the field is searchable.
+     *
+     */
+    protected boolean isSearchable() {
+        return indexOptions() != IndexOptions.NONE;
+    }
+
+    /** Returns true if the field is aggregatable.
+     *
+     */
+    protected boolean isAggregatable() {
+        try {
+            fielddataBuilder();
+            return true;
+        } catch (IllegalArgumentException e) {
+            return false;
+        }
+    }
+
     /** Generates a query that will only match documents that contain the given value.
      * The default implementation returns a {@link TermQuery} over the value bytes,
      * boosted by {@link #boost()}.
@@ -376,11 +399,13 @@ public abstract class MappedFieldType extends FieldType {
         int maxDoc = reader.maxDoc();
         Terms terms = MultiFields.getTerms(reader, name());
         if (terms == null) {
-            return null;
+            return new FieldStats.Text(maxDoc, isSearchable(), isAggregatable());
         }
-        return new FieldStats.Text(
-            maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax()
-        );
+        FieldStats stats = new FieldStats.Text(maxDoc, terms.getDocCount(),
+            terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+            isSearchable(), isAggregatable(),
+            terms.getMin(), terms.getMax());
+        return stats;
     }
 
     /**
@@ -411,9 +436,13 @@ public abstract class MappedFieldType extends FieldType {
         return null;
     }
 
+    /** @throws IllegalArgumentException if the fielddata is not supported on this type.
+     * An IllegalArgumentException is needed in order to return an http error 400
+     * when this error occurs in a request. see: {@link org.elasticsearch.ExceptionsHelper#status}
+     **/
     protected final void failIfNoDocValues() {
         if (hasDocValues() == false) {
-            throw new IllegalStateException("Can't load fielddata on [" + name()
+            throw new IllegalArgumentException("Can't load fielddata on [" + name()
                 + "] because fielddata is unsupported on fields of type ["
                 + typeName() + "]. Use doc values instead.");
         }

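As a side note, the default `isAggregatable()` added above works by probing: a field counts as aggregatable if asking for a fielddata builder does not throw `IllegalArgumentException`. A tiny stand-alone sketch of that probe pattern, where `FieldTypeStub` is a made-up stand-in for `MappedFieldType`:

```java
// Sketch of the try/catch probe used by isAggregatable(); names below are illustrative.
public class AggregatableProbeSketch {
    interface FieldTypeStub {
        void fielddataBuilder(); // throws IllegalArgumentException when fielddata is unsupported
    }

    static boolean isAggregatable(FieldTypeStub fieldType) {
        try {
            fieldType.fielddataBuilder();
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        FieldTypeStub withDocValues = () -> { /* a builder would be created here */ };
        FieldTypeStub analyzedText = () -> {
            throw new IllegalArgumentException("Fielddata is not supported on this field");
        };
        System.out.println(isAggregatable(withDocValues)); // true
        System.out.println(isAggregatable(analyzedText));  // false
    }
}
```
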
@@ -396,15 +396,14 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
             String field = name();
             long size = PointValues.size(reader, field);
             if (size == 0) {
-                return null;
+                return new FieldStats.Date(reader.maxDoc(), isSearchable(), isAggregatable(), dateTimeFormatter());
             }
             int docCount = PointValues.getDocCount(reader, field);
             byte[] min = PointValues.getMinPackedValue(reader, field);
             byte[] max = PointValues.getMaxPackedValue(reader, field);
             return new FieldStats.Date(reader.maxDoc(),docCount, -1L, size,
-                LongPoint.decodeDimension(min, 0),
-                LongPoint.decodeDimension(max, 0),
-                dateTimeFormatter());
+                isSearchable(), isAggregatable(),
+                dateTimeFormatter(), LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
         }
 
         @Override

@@ -170,17 +170,17 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Long stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Long(maxDoc, isSearchable(), isAggregatable());
             }
             long minValue = LegacyNumericUtils.getMinInt(terms);
             long maxValue = LegacyNumericUtils.getMaxInt(terms);
-            return new FieldStats.Long(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+            return new FieldStats.Long(maxDoc, terms.getDocCount(),
+                terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(),
+                minValue, maxValue);
         }
 
         @Override

@@ -375,17 +375,17 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Date stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Date(maxDoc, isSearchable(), isAggregatable(), dateTimeFormatter());
             }
             long minValue = LegacyNumericUtils.getMinLong(terms);
             long maxValue = LegacyNumericUtils.getMaxLong(terms);
-            return new FieldStats.Date(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter()
-            );
+            return new FieldStats.Date(maxDoc, terms.getDocCount(),
+                terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(),
+                dateTimeFormatter(), minValue, maxValue);
         }
 
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {

@@ -136,12 +136,12 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public Double nullValue() {
-            return (Double)super.nullValue();
+        public java.lang.Double nullValue() {
+            return (java.lang.Double)super.nullValue();
         }
 
         @Override
-        public Double valueForSearch(Object value) {
+        public java.lang.Double valueForSearch(Object value) {
             if (value == null) {
                 return null;
             }
@@ -151,7 +151,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
             if (value instanceof BytesRef) {
                 return Numbers.bytesToDouble((BytesRef) value);
             }
-            return Double.parseDouble(value.toString());
+            return java.lang.Double.parseDouble(value.toString());
         }
 
         @Override
@@ -181,17 +181,17 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Double stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Double(maxDoc, isSearchable(), isAggregatable());
             }
             double minValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms));
             double maxValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms));
-            return new FieldStats.Double(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+            return new FieldStats.Double(maxDoc, terms.getDocCount(),
+                terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(),
+                minValue, maxValue);
         }
 
         @Override
@@ -235,13 +235,13 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
                 }
                 value = fieldType().nullValue();
             } else {
-                value = Double.parseDouble(sExternalValue);
+                value = java.lang.Double.parseDouble(sExternalValue);
             }
         } else {
             value = ((Number) externalValue).doubleValue();
         }
         if (context.includeInAll(includeInAll, this)) {
-            context.allEntries().addText(fieldType().name(), Double.toString(value), boost);
+            context.allEntries().addText(fieldType().name(), java.lang.Double.toString(value), boost);
         }
     } else {
         XContentParser parser = context.parser();
@@ -258,7 +258,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
             && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
             XContentParser.Token token;
             String currentFieldName = null;
-            Double objValue = fieldType().nullValue();
+            java.lang.Double objValue = fieldType().nullValue();
             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                 if (token == XContentParser.Token.FIELD_NAME) {
                     currentFieldName = parser.currentName();
@@ -341,7 +341,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
 
         @Override
         public String numericAsString() {
-            return Double.toString(number);
+            return java.lang.Double.toString(number);
         }
     }
 

@@ -166,17 +166,16 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Double stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Double(maxDoc, isSearchable(), isAggregatable());
             }
             float minValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms));
             float maxValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms));
-            return new FieldStats.Double(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+            return new FieldStats.Double(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+                isSearchable(), isAggregatable(), minValue, maxValue);
         }
 
         @Override

@@ -170,17 +170,17 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Long stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Long(maxDoc, isSearchable(), isAggregatable());
             }
             long minValue = LegacyNumericUtils.getMinInt(terms);
             long maxValue = LegacyNumericUtils.getMaxInt(terms);
             return new FieldStats.Long(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+                isSearchable(), isAggregatable(), minValue, maxValue);
         }
 
         @Override

@@ -173,13 +173,14 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Long(
+                    maxDoc, isSearchable(), isAggregatable());
             }
             long minValue = LegacyNumericUtils.getMinLong(terms);
             long maxValue = LegacyNumericUtils.getMaxLong(terms);
             return new FieldStats.Long(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+                isSearchable(), isAggregatable(), minValue, maxValue);
         }
 
         @Override

@@ -174,17 +174,17 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
         }
 
         @Override
-        public FieldStats stats(IndexReader reader) throws IOException {
+        public FieldStats.Long stats(IndexReader reader) throws IOException {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Long(maxDoc, isSearchable(), isAggregatable());
             }
             long minValue = LegacyNumericUtils.getMinInt(terms);
             long maxValue = LegacyNumericUtils.getMaxInt(terms);
             return new FieldStats.Long(
-                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
-            );
+                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+                isSearchable(), isAggregatable(), minValue, maxValue);
         }
 
         @Override

@@ -120,7 +120,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         @Override
         public NumberFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            NumberFieldMapper fieldMapper = new NumberFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
+            NumberFieldMapper fieldMapper =
+                new NumberFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
                 coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
             return (NumberFieldMapper) fieldMapper.includeInAll(includeInAll);
         }
@@ -135,7 +136,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+        public Mapper.Builder<?,?> parse(String name, Map<String, Object> node,
+                                         ParserContext parserContext) throws MapperParsingException {
             if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) {
                 switch (type) {
                     case BYTE:
@@ -212,7 +214,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             float l = Float.NEGATIVE_INFINITY;
             float u = Float.POSITIVE_INFINITY;
             if (lowerTerm != null) {
@@ -238,13 +241,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             List<Field> fields = new ArrayList<>();
             if (indexed) {
                 fields.add(new FloatPoint(name, value.floatValue()));
             }
             if (docValued) {
-                fields.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value.floatValue())));
+                fields.add(new SortedNumericDocValuesField(name,
+                    NumericUtils.floatToSortableInt(value.floatValue())));
             }
             if (stored) {
                 fields.add(new StoredField(name, value.floatValue()));
@@ -253,17 +258,18 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        FieldStats.Double stats(IndexReader reader, String field) throws IOException {
-            long size = PointValues.size(reader, field);
+        FieldStats.Double stats(IndexReader reader, String fieldName,
+                                boolean isSearchable, boolean isAggregatable) throws IOException {
+            long size = PointValues.size(reader, fieldName);
             if (size == 0) {
-                return null;
+                return new FieldStats.Double(reader.maxDoc(), isSearchable, isAggregatable);
             }
-            int docCount = PointValues.getDocCount(reader, field);
-            byte[] min = PointValues.getMinPackedValue(reader, field);
-            byte[] max = PointValues.getMaxPackedValue(reader, field);
+            int docCount = PointValues.getDocCount(reader, fieldName);
+            byte[] min = PointValues.getMinPackedValue(reader, fieldName);
+            byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
             return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size,
-                FloatPoint.decodeDimension(min, 0),
-                FloatPoint.decodeDimension(max, 0));
+                isSearchable, isAggregatable,
+                FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0));
         }
     },
     DOUBLE("double", NumericType.DOUBLE) {
@@ -299,7 +305,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             double l = Double.NEGATIVE_INFINITY;
             double u = Double.POSITIVE_INFINITY;
             if (lowerTerm != null) {
@@ -325,13 +332,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             List<Field> fields = new ArrayList<>();
             if (indexed) {
                 fields.add(new DoublePoint(name, value.doubleValue()));
             }
             if (docValued) {
-                fields.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value.doubleValue())));
+                fields.add(new SortedNumericDocValuesField(name,
+                    NumericUtils.doubleToSortableLong(value.doubleValue())));
             }
             if (stored) {
                 fields.add(new StoredField(name, value.doubleValue()));
@@ -340,17 +349,18 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        FieldStats.Double stats(IndexReader reader, String field) throws IOException {
-            long size = PointValues.size(reader, field);
+        FieldStats.Double stats(IndexReader reader, String fieldName,
+                                boolean isSearchable, boolean isAggregatable) throws IOException {
+            long size = PointValues.size(reader, fieldName);
             if (size == 0) {
-                return null;
+                return new FieldStats.Double(reader.maxDoc(), isSearchable, isAggregatable);
             }
-            int docCount = PointValues.getDocCount(reader, field);
-            byte[] min = PointValues.getMinPackedValue(reader, field);
-            byte[] max = PointValues.getMaxPackedValue(reader, field);
+            int docCount = PointValues.getDocCount(reader, fieldName);
+            byte[] min = PointValues.getMinPackedValue(reader, fieldName);
+            byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
             return new FieldStats.Double(reader.maxDoc(),docCount, -1L, size,
-                DoublePoint.decodeDimension(min, 0),
-                DoublePoint.decodeDimension(max, 0));
+                isSearchable, isAggregatable,
+                DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0));
         }
     },
     BYTE("byte", NumericType.BYTE) {
@@ -385,7 +395,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
         }
 
@@ -395,13 +406,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             return INTEGER.createFields(name, value, indexed, docValued, stored);
         }
 
         @Override
-        FieldStats.Long stats(IndexReader reader, String field) throws IOException {
-            return (FieldStats.Long) INTEGER.stats(reader, field);
+        FieldStats.Long stats(IndexReader reader, String fieldName,
+                              boolean isSearchable, boolean isAggregatable) throws IOException {
+            return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
         }
 
         @Override
@@ -441,7 +454,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
         }
 
@@ -451,13 +465,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             return INTEGER.createFields(name, value, indexed, docValued, stored);
         }
 
         @Override
-        FieldStats.Long stats(IndexReader reader, String field) throws IOException {
-            return (FieldStats.Long) INTEGER.stats(reader, field);
+        FieldStats.Long stats(IndexReader reader, String fieldName,
+                              boolean isSearchable, boolean isAggregatable) throws IOException {
+            return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
         }
 
         @Override
@@ -498,7 +514,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             int l = Integer.MIN_VALUE;
             int u = Integer.MAX_VALUE;
             if (lowerTerm != null) {
@@ -530,7 +547,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             List<Field> fields = new ArrayList<>();
             if (indexed) {
                 fields.add(new IntPoint(name, value.intValue()));
@@ -545,17 +563,18 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        FieldStats.Long stats(IndexReader reader, String field) throws IOException {
-            long size = PointValues.size(reader, field);
+        FieldStats.Long stats(IndexReader reader, String fieldName,
+                              boolean isSearchable, boolean isAggregatable) throws IOException {
+            long size = PointValues.size(reader, fieldName);
             if (size == 0) {
-                return null;
+                return new FieldStats.Long(reader.maxDoc(), isSearchable, isAggregatable);
            }
-            int docCount = PointValues.getDocCount(reader, field);
-            byte[] min = PointValues.getMinPackedValue(reader, field);
-            byte[] max = PointValues.getMaxPackedValue(reader, field);
+            int docCount = PointValues.getDocCount(reader, fieldName);
+            byte[] min = PointValues.getMinPackedValue(reader, fieldName);
+            byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
             return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size,
-                IntPoint.decodeDimension(min, 0),
-                IntPoint.decodeDimension(max, 0));
+                isSearchable, isAggregatable,
+                IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0));
         }
     },
     LONG("long", NumericType.LONG) {
@@ -591,7 +610,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
+        Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                         boolean includeLower, boolean includeUpper) {
             long l = Long.MIN_VALUE;
             long u = Long.MAX_VALUE;
             if (lowerTerm != null) {
@@ -623,7 +643,8 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored) {
+        public List<Field> createFields(String name, Number value,
+                                        boolean indexed, boolean docValued, boolean stored) {
             List<Field> fields = new ArrayList<>();
             if (indexed) {
                 fields.add(new LongPoint(name, value.longValue()));
@@ -638,17 +659,18 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        FieldStats.Long stats(IndexReader reader, String field) throws IOException {
-            long size = PointValues.size(reader, field);
+        FieldStats.Long stats(IndexReader reader, String fieldName,
+                              boolean isSearchable, boolean isAggregatable) throws IOException {
+            long size = PointValues.size(reader, fieldName);
             if (size == 0) {
-                return null;
+                return new FieldStats.Long(reader.maxDoc(), isSearchable, isAggregatable);
             }
-            int docCount = PointValues.getDocCount(reader, field);
-            byte[] min = PointValues.getMinPackedValue(reader, field);
-            byte[] max = PointValues.getMaxPackedValue(reader, field);
+            int docCount = PointValues.getDocCount(reader, fieldName);
+            byte[] min = PointValues.getMinPackedValue(reader, fieldName);
+            byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
             return new FieldStats.Long(reader.maxDoc(),docCount, -1L, size,
-                LongPoint.decodeDimension(min, 0),
-                LongPoint.decodeDimension(max, 0));
+                isSearchable, isAggregatable,
+                LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
         }
     };
 
@@ -670,12 +692,15 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
     }
     abstract Query termQuery(String field, Object value);
     abstract Query termsQuery(String field, List<Object> values);
-    abstract Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper);
+    abstract Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
+                              boolean includeLower, boolean includeUpper);
     abstract Query fuzzyQuery(String field, Object value, Fuzziness fuzziness);
    abstract Number parse(XContentParser parser, boolean coerce) throws IOException;
    abstract Number parse(Object value);
-    public abstract List<Field> createFields(String name, Number value, boolean indexed, boolean docValued, boolean stored);
-    abstract FieldStats<? extends Number> stats(IndexReader reader, String field) throws IOException;
+    public abstract List<Field> createFields(String name, Number value, boolean indexed,
+                                             boolean docValued, boolean stored);
+    abstract FieldStats<? extends Number> stats(IndexReader reader, String fieldName,
+                                                boolean isSearchable, boolean isAggregatable) throws IOException;
    Number valueForSearch(Number value) {
        return value;
    }
@@ -736,13 +761,14 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         }
 
         @Override
-        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
+        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength,
+                                int maxExpansions, boolean transpositions) {
             return type.fuzzyQuery(name(), value, fuzziness);
         }
 
         @Override
         public FieldStats stats(IndexReader reader) throws IOException {
-            return type.stats(reader, name());
+            return type.stats(reader, name(), isSearchable(), isAggregatable());
         }
 
         @Override

@@ -478,7 +478,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
         } else if (fielddata) {
             return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
         } else {
-            throw new IllegalStateException("Fielddata is disabled on analyzed string fields by default. Set fielddata=true on ["
+            throw new IllegalArgumentException("Fielddata is disabled on analyzed string fields by default. Set fielddata=true on ["
                 + name() + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however "
                 + "use significant memory.");
         }

@@ -294,7 +294,7 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
     @Override
     public IndexFieldData.Builder fielddataBuilder() {
         if (fielddata == false) {
-            throw new IllegalStateException("Fielddata is disabled on text fields by default. Set fielddata=true on [" + name()
+            throw new IllegalArgumentException("Fielddata is disabled on text fields by default. Set fielddata=true on [" + name()
                 + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however "
                 + "use significant memory.");
         }

@@ -128,6 +128,12 @@ public class IdFieldMapper extends MetadataFieldMapper {
         return CONTENT_TYPE;
     }
 
+    @Override
+    public boolean isSearchable() {
+        // The _id field is always searchable.
+        return true;
+    }
+
     @Override
     public Query termQuery(Object value, @Nullable QueryShardContext context) {
         if (indexOptions() != IndexOptions.NONE || context == null) {

@@ -123,6 +123,12 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         return CONTENT_TYPE;
     }
 
+    @Override
+    public boolean isSearchable() {
+        // The _index field is always searchable.
+        return true;
+    }
+
     /**
      * This termQuery impl looks at the context to determine the index that
      * is being queried and then returns a MATCH_ALL_QUERY or MATCH_NO_QUERY
@@ -141,8 +147,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
             return Queries.newMatchNoDocsQuery();
         }
     }
 
-
-
     @Override
     public Query termsQuery(List values, QueryShardContext context) {

@@ -229,14 +229,14 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include
             String field = name();
             long size = PointValues.size(reader, field);
             if (size == 0) {
-                return null;
+                return new FieldStats.Ip(reader.maxDoc(), isSearchable(), isAggregatable());
             }
             int docCount = PointValues.getDocCount(reader, field);
             byte[] min = PointValues.getMinPackedValue(reader, field);
             byte[] max = PointValues.getMaxPackedValue(reader, field);
-            return new FieldStats.Ip(reader.maxDoc(),docCount, -1L, size,
-                InetAddressPoint.decode(min),
-                InetAddressPoint.decode(max));
+            return new FieldStats.Ip(reader.maxDoc(), docCount, -1L, size,
+                isSearchable(), isAggregatable(),
+                InetAddressPoint.decode(min), InetAddressPoint.decode(max));
         }
 
         @Override

@@ -252,14 +252,14 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
             int maxDoc = reader.maxDoc();
             Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name());
             if (terms == null) {
-                return null;
+                return new FieldStats.Ip(maxDoc, isSearchable(), isAggregatable());
             }
             long minValue = LegacyNumericUtils.getMinLong(terms);
             long maxValue = LegacyNumericUtils.getMaxLong(terms);
-            return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(),
-                terms.getSumTotalTermFreq(),
+            return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(),
+                isSearchable(), isAggregatable(),
                 InetAddress.getByName(longToIp(minValue)),
                 InetAddress.getByName(longToIp(maxValue)));
         }
 
         @Override

@@ -58,9 +58,11 @@ public class RestFieldStatsAction extends BaseRestHandler {
     }
 
     @Override
-    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception {
+    public void handleRequest(final RestRequest request,
+                              final RestChannel channel, final Client client) throws Exception {
         if (RestActions.hasBodyContent(request) && request.hasParam("fields")) {
-            throw new IllegalArgumentException("can't specify a request body and [fields] request parameter, either specify a request body or the [fields] request parameter");
+            throw new IllegalArgumentException("can't specify a request body and [fields] request parameter, " +
+                "either specify a request body or the [fields] request parameter");
         }
 
         final FieldStatsRequest fieldStatsRequest = new FieldStatsRequest();
@@ -80,7 +82,8 @@ public class RestFieldStatsAction extends BaseRestHandler {
                 buildBroadcastShardsHeader(builder, request, response);
 
                 builder.startObject("indices");
-                for (Map.Entry<String, Map<String, FieldStats>> entry1 : response.getIndicesMergedFieldStats().entrySet()) {
+                for (Map.Entry<String, Map<String, FieldStats>> entry1 :
+                        response.getIndicesMergedFieldStats().entrySet()) {
                     builder.startObject(entry1.getKey());
                     builder.startObject("fields");
                     for (Map.Entry<String, FieldStats> entry2 : entry1.getValue().entrySet()) {
@@ -91,6 +94,12 @@ public class RestFieldStatsAction extends BaseRestHandler {
                     builder.endObject();
                 }
                 builder.endObject();
+                if (response.getConflicts().size() > 0) {
+                    builder.startObject("conflicts");
+                    for (Map.Entry<String, String> entry : response.getConflicts().entrySet()) {
+                        builder.field(entry.getKey(), entry.getValue());
+                    }
+                }
                 return new BytesRestResponse(RestStatus.OK, builder);
             }
         });

@@ -34,7 +34,9 @@ import static org.hamcrest.Matchers.equalTo;
 public class FieldStatsRequestTests extends ESTestCase {

     public void testFieldsParsing() throws Exception {
-        byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/fieldstats-index-constraints-request.json");
+        byte[] data =
+            StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/" +
+                "fieldstats-index-constraints-request.json");
         FieldStatsRequest request = new FieldStatsRequest();
         request.source(new BytesArray(data));
@@ -20,11 +20,14 @@
 package org.elasticsearch.fieldstats;

 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.action.fieldstats.FieldStatsResponse;
 import org.elasticsearch.action.fieldstats.IndexConstraint;
 import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.ArrayList;
@@ -47,11 +50,45 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

     public void testRandom() throws Exception {
         assertAcked(prepareCreate("test").addMapping(
-            "test", "string", "type=text", "date", "type=date", "double", "type=double", "double", "type=double",
-            "float", "type=float", "long", "type=long", "integer", "type=integer", "short", "type=short", "byte", "type=byte"
-        ));
+            "test",
+            "string", "type=text",
+            "date", "type=date",
+            "double", "type=double",
+            "float", "type=float",
+            "long", "type=long",
+            "integer", "type=integer",
+            "short", "type=short",
+            "byte", "type=byte"));
         ensureGreen("test");
+
+        // index=false
+        assertAcked(prepareCreate("test1").addMapping(
+            "test",
+            "string", "type=text,index=false",
+            "date", "type=date,index=false",
+            "double", "type=double,index=false",
+            "float", "type=float,index=false",
+            "long", "type=long,index=false",
+            "integer", "type=integer,index=false",
+            "short", "type=short,index=false",
+            "byte", "type=byte,index=false"
+        ));
+        ensureGreen("test1");
+
+        // no value indexed
+        assertAcked(prepareCreate("test3").addMapping(
+            "test",
+            "string", "type=text,index=false",
+            "date", "type=date,index=false",
+            "double", "type=double,index=false",
+            "float", "type=float,index=false",
+            "long", "type=long,index=false",
+            "integer", "type=integer,index=false",
+            "short", "type=short,index=false",
+            "byte", "type=byte,index=false"
+        ));
+        ensureGreen("test3");

         long minByte = Byte.MAX_VALUE;
         long maxByte = Byte.MIN_VALUE;
         long minShort = Short.MAX_VALUE;
@@ -97,12 +134,20 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
             }

             request.add(client().prepareIndex("test", "test", Integer.toString(doc))
-                .setSource("byte", b, "short", s, "integer", i, "long", l, "float", f, "double", d, "string", str)
+                .setSource("byte", b,
+                    "short", s,
+                    "integer", i,
+                    "long", l,
+                    "float", f,
+                    "double", d,
+                    "string", str)
             );
         }
         indexRandom(true, false, request);

-        FieldStatsResponse response = client().prepareFieldStats().setFields("byte", "short", "integer", "long", "float", "double", "string").get();
+        FieldStatsResponse response = client()
+            .prepareFieldStats()
+            .setFields("byte", "short", "integer", "long", "float", "double", "string").get();
         assertAllSuccessful(response);

         for (FieldStats stats : response.getAllFieldStats().values()) {
@@ -180,12 +225,12 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         }
     }

-    public void testIncompatibleFieldTypes() {
+    public void testIncompatibleFieldTypesSingleField() {
         assertAcked(prepareCreate("test1").addMapping(
             "test", "value", "type=long"
         ));
         assertAcked(prepareCreate("test2").addMapping(
             "test", "value", "type=text"
         ));
         ensureGreen("test1", "test2");

@@ -195,20 +240,64 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         client().prepareIndex("test2", "test").setSource("value", "b").get();
         refresh();

-        try {
-            client().prepareFieldStats().setFields("value").get();
-            fail();
-        } catch (IllegalStateException e){
-            assertThat(e.getMessage(), containsString("trying to merge the field stats of field [value]"));
-        }
+        FieldStatsResponse response = client().prepareFieldStats().setFields("value", "value2").get();
+        assertAllSuccessful(response);
+        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").size(), equalTo(0));
+        assertThat(response.getConflicts().size(), equalTo(1));
+        assertThat(response.getConflicts().get("value"),
+            equalTo("Field [value] of type [text] conflicts with existing field of type [whole-number] " +
+                "in other index."));

-        FieldStatsResponse response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
+        response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
         assertAllSuccessful(response);
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
         assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
         assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(new BytesRef("a")));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(new BytesRef("b")));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(new BytesRef("a")));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(),
+            equalTo(new BytesRef("b")));
+    }
+
+    @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-7257")
+    public void testIncompatibleFieldTypesMultipleFields() {
+        assertAcked(prepareCreate("test1").addMapping(
+            "test", "value", "type=long", "value2", "type=long"
+        ));
+        assertAcked(prepareCreate("test2").addMapping(
+            "test", "value", "type=text", "value2", "type=long"
+        ));
+        ensureGreen("test1", "test2");
+
+        client().prepareIndex("test1", "test").setSource("value", 1L, "value2", 1L).get();
+        client().prepareIndex("test1", "test").setSource("value", 2L).get();
+        client().prepareIndex("test2", "test").setSource("value", "a").get();
+        client().prepareIndex("test2", "test").setSource("value", "b").get();
+        refresh();
+
+        FieldStatsResponse response = client().prepareFieldStats().setFields("value", "value2").get();
+        assertAllSuccessful(response);
+        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").size(), equalTo(1));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value2").getMinValue(), equalTo(1L));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value2").getMaxValue(), equalTo(1L));
+        assertThat(response.getConflicts().size(), equalTo(1));
+        assertThat(response.getConflicts().get("value"),
+            equalTo("Field [value] of type [text] conflicts with existing field of type [whole-number] " +
+                "in other index."));
+
+        response = client().prepareFieldStats().setFields("value", "value2").setLevel("indices").get();
+        assertAllSuccessful(response);
+        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value2").getMinValue(), equalTo(1L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value2").getMaxValue(), equalTo(1L));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(new BytesRef("a")));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(),
+            equalTo(new BytesRef("b")));
     }

     public void testFieldStatsFiltering() throws Exception {
@@ -229,7 +318,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

         FieldStatsResponse response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "200"), new IndexConstraint("value", MAX , LTE, "300"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "200"),
+                new IndexConstraint("value", MAX , LTE, "300"))
             .setLevel("indices")
             .get();
         assertAllSuccessful(response);
@@ -266,7 +356,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-20"), new IndexConstraint("value", MAX, LT, "-10"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-20"),
+                new IndexConstraint("value", MAX, LT, "-10"))
             .setLevel("indices")
             .get();
         assertAllSuccessful(response);
@@ -275,7 +366,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-100"), new IndexConstraint("value", MAX, LTE, "-20"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-100"),
+                new IndexConstraint("value", MAX, LTE, "-20"))
             .setLevel("indices")
             .get();
         assertAllSuccessful(response);
@@ -284,7 +376,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"), new IndexConstraint("value", MAX, LTE, "200"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"),
+                new IndexConstraint("value", MAX, LTE, "200"))
             .setLevel("indices")
             .get();
         assertAllSuccessful(response);
@@ -295,7 +388,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "150"), new IndexConstraint("value", MAX, LTE, "300"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "150"),
+                new IndexConstraint("value", MAX, LTE, "300"))
             .setLevel("indices")
             .get();
         assertAllSuccessful(response);
@@ -322,6 +416,38 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         }
     }

+    public void testWildcardFields() throws Exception {
+        assertAcked(prepareCreate("test1").addMapping(
+            "test", "foo", "type=long", "foobar", "type=text", "barfoo", "type=long"
+        ));
+        assertAcked(prepareCreate("test2").addMapping(
+            "test", "foobar", "type=text", "barfoo", "type=long"
+        ));
+        ensureGreen("test1", "test2");
+        FieldStatsResponse response = client().prepareFieldStats()
+            .setFields("foo*")
+            .get();
+        assertAllSuccessful(response);
+        assertThat(response.getAllFieldStats().size(), equalTo(2));
+        assertThat(response.getAllFieldStats().get("foo").getMinValue(), nullValue());
+        assertThat(response.getAllFieldStats().get("foobar").getMaxValue(), nullValue());
+
+        response = client().prepareFieldStats()
+            .setFields("foo*")
+            .setLevel("indices")
+            .get();
+        assertAllSuccessful(response);
+        assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").size(), equalTo(2));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("foo").getMinValue(), nullValue());
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("foo").getMaxValue(), nullValue());
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("foobar").getMinValue(), nullValue());
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("foobar").getMaxValue(), nullValue());
+        assertThat(response.getIndicesMergedFieldStats().get("test2").size(), equalTo(1));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("foobar").getMinValue(), nullValue());
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("foobar").getMaxValue(), nullValue());
+    }
+
     private void indexRange(String index, long from, long to) throws Exception {
         List<IndexRequestBuilder> requests = new ArrayList<>();
         for (long value = from; value <= to; value++) {
@@ -25,7 +25,6 @@ import org.elasticsearch.action.fieldstats.FieldStatsResponse;
 import org.elasticsearch.action.fieldstats.IndexConstraint;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.core.DateFieldMapper;
-import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -41,13 +40,11 @@ import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
 import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
 import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.either;
 import static org.hamcrest.Matchers.equalTo;

 /**
  */
 public class FieldStatsTests extends ESSingleNodeTestCase {

     public void testByte() {
         testNumberRange("field1", "byte", 12, 18);
         testNumberRange("field1", "byte", -5, 5);
@@ -75,7 +72,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
     public void testString() {
         createIndex("test", Settings.EMPTY, "test", "field", "type=text");
         for (int value = 0; value <= 10; value++) {
-            client().prepareIndex("test", "test").setSource("field", String.format(Locale.ENGLISH, "%03d", value)).get();
+            client().prepareIndex("test", "test").setSource("field",
+                String.format(Locale.ENGLISH, "%03d", value)).get();
         }
         client().admin().indices().prepareRefresh().get();

@@ -83,10 +81,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get("field").getMaxDoc(), equalTo(11L));
         assertThat(result.getAllFieldStats().get("field").getDocCount(), equalTo(11L));
         assertThat(result.getAllFieldStats().get("field").getDensity(), equalTo(100));
-        assertThat(result.getAllFieldStats().get("field").getMinValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 0))));
-        assertThat(result.getAllFieldStats().get("field").getMaxValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 10))));
-        assertThat(result.getAllFieldStats().get("field").getMinValueAsString(), equalTo(String.format(Locale.ENGLISH, "%03d", 0)));
-        assertThat(result.getAllFieldStats().get("field").getMaxValueAsString(), equalTo(String.format(Locale.ENGLISH, "%03d", 10)));
+        assertThat(result.getAllFieldStats().get("field").getMinValue(),
+            equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 0))));
+        assertThat(result.getAllFieldStats().get("field").getMaxValue(),
+            equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 10))));
+        assertThat(result.getAllFieldStats().get("field").getMinValueAsString(),
+            equalTo(String.format(Locale.ENGLISH, "%03d", 0)));
+        assertThat(result.getAllFieldStats().get("field").getMaxValueAsString(),
+            equalTo(String.format(Locale.ENGLISH, "%03d", 10)));
     }

     public void testDouble() {
@@ -126,6 +128,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

     private void testNumberRange(String fieldName, String fieldType, long min, long max) {
         createIndex("test", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
+        // index=false
+        createIndex("test1", Settings.EMPTY, "test", fieldName, "type=" + fieldType + ",index=false");
+        // no value
+        createIndex("test2", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
+
         for (long value = min; value <= max; value++) {
             client().prepareIndex("test", "test").setSource(fieldName, value).get();
         }
@@ -138,78 +145,64 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
         assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(min));
         assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(max));
-        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(java.lang.Long.toString(min)));
-        assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(java.lang.Long.toString(max)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(),
+            equalTo(java.lang.Long.toString(min)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(),
+            equalTo(java.lang.Long.toString(max)));
+        assertThat(result.getAllFieldStats().get(fieldName).isSearchable(), equalTo(true));
+        assertThat(result.getAllFieldStats().get(fieldName).isAggregatable(), equalTo(true));
+
         client().admin().indices().prepareDelete("test").get();
+        client().admin().indices().prepareDelete("test1").get();
+        client().admin().indices().prepareDelete("test2").get();
     }

     public void testMerge() {
         List<FieldStats> stats = new ArrayList<>();
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));

-        FieldStats stat = new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L);
+        FieldStats stat = new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L);
         for (FieldStats otherStat : stats) {
-            stat.append(otherStat);
+            stat.accumulate(otherStat);
         }
         assertThat(stat.getMaxDoc(), equalTo(4L));
         assertThat(stat.getDocCount(), equalTo(4L));
         assertThat(stat.getSumDocFreq(), equalTo(4L));
         assertThat(stat.getSumTotalTermFreq(), equalTo(4L));
+        assertThat(stat.isSearchable(), equalTo(true));
+        assertThat(stat.isAggregatable(), equalTo(false));
     }

     public void testMerge_notAvailable() {
         List<FieldStats> stats = new ArrayList<>();
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
-        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, true, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, true, 1L, 1L));
+        stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));

-        FieldStats stat = new FieldStats.Long(1, -1L, -1L, -1L, 1L, 1L);
+        FieldStats stat = new FieldStats.Long(1, -1L, -1L, -1L, false, true, 1L, 1L);
         for (FieldStats otherStat : stats) {
-            stat.append(otherStat);
+            stat.accumulate(otherStat);
         }
         assertThat(stat.getMaxDoc(), equalTo(4L));
         assertThat(stat.getDocCount(), equalTo(-1L));
         assertThat(stat.getSumDocFreq(), equalTo(-1L));
         assertThat(stat.getSumTotalTermFreq(), equalTo(-1L));
+        assertThat(stat.isSearchable(), equalTo(true));
+        assertThat(stat.isAggregatable(), equalTo(true));

-        stats.add(new FieldStats.Long(1, -1L, -1L, -1L, 1L, 1L));
+        stats.add(new FieldStats.Long(1, -1L, -1L, -1L, true, true, 1L, 1L));
         stat = stats.remove(0);
         for (FieldStats otherStat : stats) {
-            stat.append(otherStat);
+            stat.accumulate(otherStat);
         }
         assertThat(stat.getMaxDoc(), equalTo(4L));
         assertThat(stat.getDocCount(), equalTo(-1L));
         assertThat(stat.getSumDocFreq(), equalTo(-1L));
         assertThat(stat.getSumTotalTermFreq(), equalTo(-1L));
+        assertThat(stat.isSearchable(), equalTo(true));
+        assertThat(stat.isAggregatable(), equalTo(true));
     }
-
-    public void testInvalidField() {
-        createIndex("test1", Settings.EMPTY, "test", "field1", "type=text");
-        client().prepareIndex("test1", "test").setSource("field1", "a").get();
-        client().prepareIndex("test1", "test").setSource("field1", "b").get();
-
-        createIndex("test2", Settings.EMPTY, "test", "field2", "type=text");
-        client().prepareIndex("test2", "test").setSource("field2", "a").get();
-        client().prepareIndex("test2", "test").setSource("field2", "b").get();
-        client().admin().indices().prepareRefresh().get();
-
-        FieldStatsResponse result = client().prepareFieldStats().setFields("field1", "field2").get();
-        assertThat(result.getFailedShards(), equalTo(2));
-        assertThat(result.getTotalShards(), equalTo(2));
-        assertThat(result.getSuccessfulShards(), equalTo(0));
-        assertThat(result.getShardFailures()[0].reason(), either(containsString("field [field1] doesn't exist")).or(containsString("field [field2] doesn't exist")));
-        assertThat(result.getIndicesMergedFieldStats().size(), equalTo(0));
-
-        // will only succeed on the 'test2' shard, because there the field does exist
-        result = client().prepareFieldStats().setFields("field1").get();
-        assertThat(result.getFailedShards(), equalTo(1));
-        assertThat(result.getTotalShards(), equalTo(2));
-        assertThat(result.getSuccessfulShards(), equalTo(1));
-        assertThat(result.getShardFailures()[0].reason(), either(containsString("field [field1] doesn't exist")).or(containsString("field [field2] doesn't exist")));
-        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMinValueAsString(), equalTo("a"));
-        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMaxValueAsString(), equalTo("b"));
-    }

     public void testNumberFiltering() {
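The two merge tests above pin down how the new flags behave when per-shard stats are combined: the collapsed value is true as soon as any merged instance has the flag set. A minimal sketch of that presumed OR-collapse, using only the accessors exercised by the tests (the actual `accumulate` implementation is not part of this hunk and the helper name here is hypothetical):

[source,java]
--------------------------------------------------
// Sketch only: mirrors the behaviour asserted in testMerge/testMerge_notAvailable.
// Both flags collapse with OR semantics across all accumulated instances.
static boolean[] collapseFlags(java.util.List<FieldStats> stats) {
    boolean searchable = false;
    boolean aggregatable = false;
    for (FieldStats s : stats) {
        searchable |= s.isSearchable();
        aggregatable |= s.isAggregatable();
    }
    return new boolean[] { searchable, aggregatable };
}
--------------------------------------------------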
@@ -229,21 +222,24 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-1"), new IndexConstraint("value", MAX, LTE, "0"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "-1"),
+                new IndexConstraint("value", MAX, LTE, "0"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"), new IndexConstraint("value", MAX, LT, "1"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"),
+                new IndexConstraint("value", MAX, LT, "1"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"), new IndexConstraint("value", MAX, LTE, "1"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"),
+                new IndexConstraint("value", MAX, LTE, "1"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
@@ -251,7 +247,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"), new IndexConstraint("value", MAX, LTE, "2"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"),
+                new IndexConstraint("value", MAX, LTE, "2"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
@@ -259,14 +256,16 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "1"), new IndexConstraint("value", MAX, LTE, "2"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "1"),
+                new IndexConstraint("value", MAX, LTE, "2"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2"), new IndexConstraint("value", MAX, LTE, "3"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2"),
+                new IndexConstraint("value", MAX, LTE, "3"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
@@ -274,7 +273,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "3"), new IndexConstraint("value", MAX, LTE, "4"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "3"),
+                new IndexConstraint("value", MAX, LTE, "4"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
@@ -282,14 +282,16 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "3"), new IndexConstraint("value", MAX, LTE, "4"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "3"),
+                new IndexConstraint("value", MAX, LTE, "4"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"), new IndexConstraint("value", MAX, LTE, "3"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"),
+                new IndexConstraint("value", MAX, LTE, "3"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
@@ -298,7 +300,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "1"), new IndexConstraint("value", MAX, LT, "3"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "1"),
+                new IndexConstraint("value", MAX, LT, "3"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
@@ -321,51 +324,66 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
+            equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
+            equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo(dateTime2Str));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-30T00:00:00.000Z"), new IndexConstraint("value", MAX, LTE, "2013-12-31T00:00:00.000Z"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-30T00:00:00.000Z"),
+                new IndexConstraint("value", MAX, LTE, "2013-12-31T00:00:00.000Z"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-31T00:00:00.000Z"), new IndexConstraint("value", MAX, LTE, "2014-01-01T00:00:00.000Z"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-31T00:00:00.000Z"),
+                new IndexConstraint("value", MAX, LTE, "2014-01-01T00:00:00.000Z"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
+            equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
+            equalTo(dateTime1Str));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-01T00:00:00.000Z"), new IndexConstraint("value", MAX, LTE, "2014-01-02T00:00:00.000Z"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-01T00:00:00.000Z"),
+                new IndexConstraint("value", MAX, LTE, "2014-01-02T00:00:00.000Z"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo(dateTime2Str));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-02T00:00:00.000Z"), new IndexConstraint("value", MAX, LTE, "2014-01-03T00:00:00.000Z"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-02T00:00:00.000Z"),
+                new IndexConstraint("value", MAX, LTE, "2014-01-03T00:00:00.000Z"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));

         response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2014-01-01T23:00:00.000Z"), new IndexConstraint("value", MAX, LTE, "2014-01-02T01:00:00.000Z"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GTE, "2014-01-01T23:00:00.000Z"),
+                new IndexConstraint("value", MAX, LTE, "2014-01-02T01:00:00.000Z"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo(dateTime2Str));

         response = client().prepareFieldStats()
             .setFields("value")
@@ -373,10 +391,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
+            equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
+            equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo(dateTime2Str));

         response = client().prepareFieldStats()
             .setFields("value")
@@ -384,10 +406,14 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
+            equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
+            equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
+            equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo(dateTime2Str));
     }

     public void testDateFiltering_optionalFormat() {
@@ -401,16 +427,20 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         DateTime dateTime2 = new DateTime(2014, 1, 2, 0, 0, 0, 0, DateTimeZone.UTC);
         FieldStatsResponse response = client().prepareFieldStats()
             .setFields("value")
-            .setIndexContraints(new IndexConstraint("value", MIN, GT, String.valueOf(dateTime1.getMillis()), "epoch_millis"), new IndexConstraint("value", MAX, LTE, String.valueOf(dateTime2.getMillis()), "epoch_millis"))
+            .setIndexContraints(new IndexConstraint("value", MIN, GT,
+                String.valueOf(dateTime1.getMillis()), "epoch_millis"),
+                new IndexConstraint("value", MAX, LTE, String.valueOf(dateTime2.getMillis()), "epoch_millis"))
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
+            equalTo("2014-01-02T00:00:00.000Z"));

         try {
             client().prepareFieldStats()
                 .setFields("value")
-                .setIndexContraints(new IndexConstraint("value", MIN, GT, String.valueOf(dateTime1.getMillis()), "xyz"))
+                .setIndexContraints(new IndexConstraint("value", MIN, GT,
+                    String.valueOf(dateTime1.getMillis()), "xyz"))
                 .setLevel("indices")
                 .get();
             fail("IllegalArgumentException should have been thrown");
@@ -426,7 +456,15 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
             .setLevel("indices")
             .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").size(), equalTo(0));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").size(), equalTo(1));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxDoc(), equalTo(0L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getDocCount(), equalTo(0L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getSumDocFreq(), equalTo(0L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getSumTotalTermFreq(), equalTo(0L));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").isSearchable(), equalTo(true));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").isAggregatable(), equalTo(true));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(null));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(null));

         response = client().prepareFieldStats()
             .setFields("value")
@@ -436,4 +474,15 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
     }

+    public void testMetaFieldsSearchable() {
+        createIndex("test1", Settings.EMPTY, "type", "value", "type=date");
+        FieldStatsResponse response = client().prepareFieldStats()
+            .setFields("_id", "_index")
+            .get();
+        assertThat(response.getAllFieldStats().size(), equalTo(2));
+        assertThat(response.getAllFieldStats().get("_id").isSearchable(), equalTo(true));
+        assertThat(response.getAllFieldStats().get("_index").isSearchable(), equalTo(true));
+        assertThat(response.getAllFieldStats().get("_id").isAggregatable(), equalTo(false));
+        assertThat(response.getAllFieldStats().get("_index").isAggregatable(), equalTo(true));
+    }
 }
@@ -188,7 +188,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
             try {
                 ifds.getForField(ft);
                 fail();
-            } catch (IllegalStateException e) {
+            } catch (IllegalArgumentException e) {
                 assertThat(e.getMessage(), containsString("doc values"));
             }
         } finally {
@@ -410,7 +410,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {

         DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping));
         assertEquals(mapping, disabledMapper.mappingSource().toString());
-        IllegalStateException e = expectThrows(IllegalStateException.class,
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> disabledMapper.mappers().getMapper("field").fieldType().fielddataBuilder());
         assertThat(e.getMessage(), containsString("Fielddata is disabled"));

@@ -673,7 +673,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
             .endObject().endObject().string();

         assertEquals(expectedMapping, mapper.mappingSource().toString());
-        IllegalStateException e = expectThrows(IllegalStateException.class,
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
             () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder());
         assertThat(e.getMessage(), containsString("Fielddata is disabled"));
     }
@@ -29,7 +29,8 @@ curl -XGET "http://localhost:9200/index1,index2/_field_stats?fields=rating"
 Supported request options:

 [horizontal]
-`fields`:: A list of fields to compute stats for.
+`fields`:: A list of fields to compute stats for. The field name supports wildcard notation. For example, using `text_*`
+will cause all fields that match the expression to be returned.
 `level`:: Defines if field stats should be returned on a per index level or on a
 cluster wide level. Valid values are `indices` and `cluster` (default).

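The same wildcard expansion is exercised from the Java API by the integration tests in this commit. A minimal sketch, assuming an already-connected `client` (the `text_*` pattern is the one used in the documentation above):

[source,java]
--------------------------------------------------
// Sketch only: request stats for every field matching "text_*" and read the
// cluster-level (merged) result, as the new testWildcardFields test does with "foo*".
FieldStatsResponse response = client.prepareFieldStats()
    .setFields("text_*")   // wildcard pattern, expanded against the field mappings
    .get();
for (Map.Entry<String, FieldStats> entry : response.getAllFieldStats().entrySet()) {
    System.out.println(entry.getKey() + " searchable=" + entry.getValue().isSearchable());
}
--------------------------------------------------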
@@ -77,6 +78,14 @@ documents, or -1 if this measurement isn't available on one or more shards.
 Term frequency is the total number of occurrences of a term in a particular
 document and field.

+`is_searchable`
+
+True if any of the instances of the field is searchable, false otherwise.
+
+`is_aggregatable`
+
+True if any of the instances of the field is aggregatable, false otherwise.
+
 `min_value`::

 The lowest value in the field.
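On the Java side the same two values are exposed as accessors on `FieldStats`. A minimal sketch, assuming a `response` obtained as in the previous example and the `rating` field from the sample response below:

[source,java]
--------------------------------------------------
// Assumed: "response" is a FieldStatsResponse that includes stats for "rating".
FieldStats ratingStats = response.getAllFieldStats().get("rating");
boolean searchable = ratingStats.isSearchable();     // collapsed across the queried indices
boolean aggregatable = ratingStats.isAggregatable(); // collapsed across the queried indices
--------------------------------------------------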
@ -128,7 +137,9 @@ Response:
|
||||||
"sum_doc_freq": 2258532,
|
"sum_doc_freq": 2258532,
|
||||||
"sum_total_term_freq": -1,
|
"sum_total_term_freq": -1,
|
||||||
"min_value": "2008-08-01T16:37:51.513Z",
|
"min_value": "2008-08-01T16:37:51.513Z",
|
||||||
"max_value": "2013-06-02T03:23:11.593Z"
|
"max_value": "2013-06-02T03:23:11.593Z",
|
||||||
|
"is_searchable": "true",
|
||||||
|
"is_aggregatable": "true"
|
||||||
},
|
},
|
||||||
"display_name": {
|
"display_name": {
|
||||||
"max_doc": 1326564,
|
"max_doc": 1326564,
|
||||||
|
@ -137,7 +148,9 @@ Response:
|
||||||
"sum_doc_freq": 166535,
|
"sum_doc_freq": 166535,
|
||||||
"sum_total_term_freq": 166616,
|
"sum_total_term_freq": 166616,
|
||||||
"min_value": "0",
|
"min_value": "0",
|
||||||
"max_value": "정혜선"
|
"max_value": "정혜선",
|
||||||
|
"is_searchable": "true",
|
||||||
|
"is_aggregatable": "false"
|
||||||
},
|
},
|
||||||
"answer_count": {
|
"answer_count": {
|
||||||
"max_doc": 1326564,
|
"max_doc": 1326564,
|
||||||
|
@ -146,7 +159,9 @@ Response:
|
||||||
"sum_doc_freq": 559540,
|
"sum_doc_freq": 559540,
|
||||||
"sum_total_term_freq": -1,
|
"sum_total_term_freq": -1,
|
||||||
"min_value": 0,
|
"min_value": 0,
|
||||||
"max_value": 160
|
"max_value": 160,
|
||||||
|
"is_searchable": "true",
|
||||||
|
"is_aggregatable": "true"
|
||||||
},
|
},
|
||||||
"rating": {
|
"rating": {
|
||||||
"max_doc": 1326564,
|
"max_doc": 1326564,
|
||||||
|
@ -155,7 +170,9 @@ Response:
|
||||||
"sum_doc_freq": 1751568,
|
"sum_doc_freq": 1751568,
|
||||||
"sum_total_term_freq": -1,
|
"sum_total_term_freq": -1,
|
||||||
"min_value": -14,
|
"min_value": -14,
|
||||||
"max_value": 1277
|
"max_value": 1277,
|
||||||
|
"is_searchable": "true",
|
||||||
|
"is_aggregatable": "true"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -165,6 +182,43 @@ Response:
 
 <1> The `_all` key indicates that it contains the field stats of all indices in the cluster.
 
+NOTE: When using the cluster level field statistics it is possible to have conflicts if the same field is used in
+different indices with incompatible types. For instance a field of type `long` is not compatible with a field of
+type `float` or `string`. A section named `conflicts` is added to the response if one or more conflicts are raised.
+It contains all the fields with conflicts and the reason of the incompatibility.
+
+[source,js]
+--------------------------------------------------
+{
+   "_shards": {
+      "total": 1,
+      "successful": 1,
+      "failed": 0
+   },
+   "indices": {
+      "_all": {
+         "fields": {
+            "creation_date": {
+               "max_doc": 1326564,
+               "doc_count": 564633,
+               "density": 42,
+               "sum_doc_freq": 2258532,
+               "sum_total_term_freq": -1,
+               "min_value": "2008-08-01T16:37:51.513Z",
+               "max_value": "2013-06-02T03:23:11.593Z",
+               "is_searchable": "true",
+               "is_aggregatable": "true"
+            }
+         }
+      }
+   },
+   "conflicts": {
+      "field_name_in_conflict1": "reason1",
+      "field_name_in_conflict2": "reason2"
+   }
+}
+--------------------------------------------------
 
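For illustration, a response like the one above could be produced by a cluster-level request that asks for the same field across the two incompatible indices; a minimal sketch, reusing the placeholder field names from the example:

[source,js]
--------------------------------------------------
curl -XGET "http://localhost:9200/_field_stats?fields=field_name_in_conflict1,field_name_in_conflict2"
--------------------------------------------------

Because the request does not set `level=indices`, the stats are merged cluster-wide and the incompatible fields end up in the `conflicts` section rather than in `fields`.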
 [float]
 ==== Indices level field statistics example
 
@ -195,7 +249,9 @@ Response:
 "sum_doc_freq": 2258532,
 "sum_total_term_freq": -1,
 "min_value": "2008-08-01T16:37:51.513Z",
-"max_value": "2013-06-02T03:23:11.593Z"
+"max_value": "2013-06-02T03:23:11.593Z",
+"is_searchable": "true",
+"is_aggregatable": "true"
 },
 "display_name": {
 "max_doc": 1326564,
@ -204,7 +260,9 @@ Response:
 "sum_doc_freq": 166535,
 "sum_total_term_freq": 166616,
 "min_value": "0",
-"max_value": "정혜선"
+"max_value": "정혜선",
+"is_searchable": "true",
+"is_aggregatable": "false"
 },
 "answer_count": {
 "max_doc": 1326564,
@ -213,7 +271,9 @@ Response:
 "sum_doc_freq": 559540,
 "sum_total_term_freq": -1,
 "min_value": 0,
-"max_value": 160
+"max_value": 160,
+"is_searchable": "true",
+"is_aggregatable": "true"
 },
 "rating": {
 "max_doc": 1326564,
@ -222,7 +282,9 @@ Response:
 "sum_doc_freq": 1751568,
 "sum_total_term_freq": -1,
 "min_value": -14,
-"max_value": 1277
+"max_value": 1277,
+"is_searchable": "true",
+"is_aggregatable": "true"
 }
 }
 }
@ -296,4 +358,4 @@ curl -XPOST "http://localhost:9200/_field_stats?level=indices" -d '{
 }'
 --------------------------------------------------
 
 <1> Custom date format

@ -5,17 +5,41 @@ setup:
           body:
             mappings:
               test:
                 properties:
                   foo:
                     type: text
                   number:
                     type: long
+                  bar:
+                    type: long
+
+  - do:
+      indices.create:
+          index: test_2
+          body:
+            mappings:
+              test:
+                properties:
+                  foo:
+                    type: text
+                  number:
+                    type: long
+                  bar:
+                    type: text
 
   - do:
       index:
           index: test_1
           type: test
          id: id_1
-          body: { foo: "bar", number: 123 }
+          body: { foo: "bar", number: 123, bar: 123 }
+
+  - do:
+      index:
+          index: test_2
+          type: test
+          id: id_10
+          body: { foo: "babar", number: 456, bar: "123" }
 
   - do:
       indices.refresh: {}
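The setup above maps `bar` as `long` in `test_1` but as `text` in `test_2`, which is what later produces a `conflicts` entry for that field. Outside the REST test harness, a rough equivalent of the two index creations would be the sketch below (request bodies are illustrative and not taken from the change):

[source,js]
--------------------------------------------------
curl -XPUT "http://localhost:9200/test_1" -d '{
  "mappings": { "test": { "properties": {
    "foo": { "type": "text" }, "number": { "type": "long" }, "bar": { "type": "long" } } } }
}'
curl -XPUT "http://localhost:9200/test_2" -d '{
  "mappings": { "test": { "properties": {
    "foo": { "type": "text" }, "number": { "type": "long" }, "bar": { "type": "text" } } } }
}'
--------------------------------------------------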
@ -24,25 +48,30 @@ setup:
 "Basic field stats":
   - do:
       field_stats:
-          index: test_1
           fields: [foo, number]
 
-  - match: { indices._all.fields.foo.max_doc: 1 }
-  - match: { indices._all.fields.foo.doc_count: 1 }
-  - match: { indices._all.fields.foo.min_value: "bar" }
+  - match: { indices._all.fields.foo.max_doc: 2 }
+  - match: { indices._all.fields.foo.doc_count: 2 }
+  - match: { indices._all.fields.foo.min_value: "babar" }
   - match: { indices._all.fields.foo.max_value: "bar" }
-  - match: { indices._all.fields.number.max_doc: 1 }
-  - match: { indices._all.fields.number.doc_count: 1 }
+  - is_false: indices._all.fields.foo.min_value_as_string
+  - is_false: indices._all.fields.foo.max_value_as_string
+  - match: { indices._all.fields.foo.searchable: true }
+  - match: { indices._all.fields.foo.aggregatable: false }
+  - match: { indices._all.fields.number.max_doc: 2 }
+  - match: { indices._all.fields.number.doc_count: 2 }
+  - match: { indices._all.fields.number.searchable: true }
+  - match: { indices._all.fields.number.aggregatable: true }
   - match: { indices._all.fields.number.min_value: 123 }
   - match: { indices._all.fields.number.min_value_as_string: "123" }
-  - match: { indices._all.fields.number.max_value: 123 }
-  - match: { indices._all.fields.number.max_value_as_string: "123" }
+  - match: { indices._all.fields.number.max_value: 456 }
+  - match: { indices._all.fields.number.max_value_as_string: "456" }
+  - is_false: conflicts
 
 ---
 "Basic field stats with level set to indices":
   - do:
       field_stats:
-          index: test_1
           fields: [foo, number]
           level: indices
 
@ -50,12 +79,35 @@ setup:
   - match: { indices.test_1.fields.foo.doc_count: 1 }
   - match: { indices.test_1.fields.foo.min_value: "bar" }
   - match: { indices.test_1.fields.foo.max_value: "bar" }
+  - is_false: indices.test_1.fields.foo.min_value_as_string
+  - is_false: indices.test_1.fields.foo.max_value_as_string
+  - match: { indices.test_1.fields.foo.searchable: true }
+  - match: { indices.test_1.fields.foo.aggregatable: false }
   - match: { indices.test_1.fields.number.max_doc: 1 }
   - match: { indices.test_1.fields.number.doc_count: 1 }
+  - match: { indices.test_1.fields.number.searchable: true }
+  - match: { indices.test_1.fields.number.aggregatable: true }
   - match: { indices.test_1.fields.number.min_value: 123 }
   - match: { indices.test_1.fields.number.min_value_as_string: "123" }
   - match: { indices.test_1.fields.number.max_value: 123 }
   - match: { indices.test_1.fields.number.max_value_as_string: "123" }
+  - match: { indices.test_2.fields.foo.max_doc: 1 }
+  - match: { indices.test_2.fields.foo.doc_count: 1 }
+  - match: { indices.test_2.fields.foo.min_value: "babar" }
+  - match: { indices.test_2.fields.foo.max_value: "babar" }
+  - is_false: indices.test_2.fields.foo.min_value_as_string
+  - is_false: indices.test_2.fields.foo.max_value_as_string
+  - match: { indices.test_2.fields.foo.searchable: true }
+  - match: { indices.test_2.fields.foo.aggregatable: false }
+  - match: { indices.test_2.fields.number.max_doc: 1 }
+  - match: { indices.test_2.fields.number.doc_count: 1 }
+  - match: { indices.test_2.fields.number.searchable: true }
+  - match: { indices.test_2.fields.number.aggregatable: true }
+  - match: { indices.test_2.fields.number.min_value: 456 }
+  - match: { indices.test_2.fields.number.min_value_as_string: "456" }
+  - match: { indices.test_2.fields.number.max_value: 456 }
+  - match: { indices.test_2.fields.number.max_value_as_string: "456" }
+  - is_false: conflicts
 
 ---
 "Field stats with filtering":
@ -68,9 +120,12 @@ setup:
 
   - match: { indices.test_1.fields.foo.max_doc: 1 }
   - match: { indices.test_1.fields.foo.doc_count: 1 }
+  - match: { indices.test_1.fields.foo.searchable: true }
+  - match: { indices.test_1.fields.foo.aggregatable: false }
   - match: { indices.test_1.fields.foo.min_value: "bar" }
   - match: { indices.test_1.fields.foo.max_value: "bar" }
   - is_false: indices.test_1.fields.number
+  - is_false: conflicts
 
   - do:
       field_stats:
@ -86,5 +141,28 @@ setup:
       catch: request
       field_stats:
           index: test_1
-          fields : ["foo"]
+          fields: ["foo"]
           body: { fields: ["foo"]}
+
+---
+"Field stats with conflicts":
+  - do:
+      field_stats:
+          fields: [foo, number, bar]
+
+  - match: { indices._all.fields.foo.max_doc: 2 }
+  - match: { indices._all.fields.foo.doc_count: 2 }
+  - match: { indices._all.fields.foo.min_value: "babar" }
+  - match: { indices._all.fields.foo.max_value: "bar" }
+  - match: { indices._all.fields.foo.searchable: true }
+  - match: { indices._all.fields.foo.aggregatable: false }
+  - match: { indices._all.fields.number.max_doc: 2 }
+  - match: { indices._all.fields.number.doc_count: 2 }
+  - match: { indices._all.fields.number.searchable: true }
+  - match: { indices._all.fields.number.aggregatable: true }
+  - match: { indices._all.fields.number.min_value: 123 }
+  - match: { indices._all.fields.number.min_value_as_string: "123" }
+  - match: { indices._all.fields.number.max_value: 456 }
+  - match: { indices._all.fields.number.max_value_as_string: "456" }
+  - match: { conflicts.bar: "Field [bar] of type [text] conflicts with existing field of type [whole-number] in other index." }
+  - is_false: indices._all.fields.bar