Removes FieldStats API (#25628)

* Removes FieldStats API

* iter

* iter
Colin Goodheart-Smithe 2017-07-13 11:56:46 +01:00 committed by GitHub
parent a85b22b298
commit 11477a608f
30 changed files with 18 additions and 4053 deletions

File: org.elasticsearch.action.ActionModule

@@ -156,8 +156,6 @@ import org.elasticsearch.action.explain.TransportExplainAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesIndexAction;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.fieldstats.TransportFieldStatsAction;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.TransportGetAction;
@@ -213,7 +211,6 @@ import org.elasticsearch.plugins.ActionPlugin.ActionHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.rest.action.RestFieldCapabilitiesAction;
import org.elasticsearch.rest.action.RestFieldStatsAction;
import org.elasticsearch.rest.action.RestMainAction;
import org.elasticsearch.rest.action.admin.cluster.RestCancelTasksAction;
import org.elasticsearch.rest.action.admin.cluster.RestClusterAllocationExplainAction;
@@ -493,7 +490,6 @@ public class ActionModule extends AbstractModule {
actions.register(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class);
actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class);
actions.register(FieldStatsAction.INSTANCE, TransportFieldStatsAction.class);
actions.register(FieldCapabilitiesAction.INSTANCE, TransportFieldCapabilitiesAction.class,
TransportFieldCapabilitiesIndexAction.class);
@@ -607,7 +603,6 @@
registerHandler.accept(new RestPutStoredScriptAction(settings, restController));
registerHandler.accept(new RestDeleteStoredScriptAction(settings, restController));
registerHandler.accept(new RestFieldStatsAction(settings, restController));
registerHandler.accept(new RestFieldCapabilitiesAction(settings, restController));
// Tasks API

File: org.elasticsearch.action.fieldstats.FieldStats (deleted)

@@ -1,789 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Objects;
public abstract class FieldStats<T> implements Writeable, ToXContent {
private final byte type;
private long maxDoc;
private long docCount;
private long sumDocFreq;
private long sumTotalTermFreq;
private boolean isSearchable;
private boolean isAggregatable;
private boolean hasMinMax;
protected T minValue;
protected T maxValue;
/**
* Builds a FieldStats where min and max value are not available for the field.
* @param type The native type of this FieldStats
* @param maxDoc Max number of docs
* @param docCount the number of documents that have at least one term for this field,
* or -1 if this information isn't available for this field.
* @param sumDocFreq the sum of {@link TermsEnum#docFreq()} for all terms in this field,
* or -1 if this information isn't available for this field.
* @param sumTotalTermFreq the sum of {@link TermsEnum#totalTermFreq} for all terms in this field,
* or -1 if this measure isn't available for this field.
* @param isSearchable true if this field is searchable
* @param isAggregatable true if this field is aggregatable
*/
FieldStats(byte type, long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
this.type = type;
this.maxDoc = maxDoc;
this.docCount = docCount;
this.sumDocFreq = sumDocFreq;
this.sumTotalTermFreq = sumTotalTermFreq;
this.isSearchable = isSearchable;
this.isAggregatable = isAggregatable;
this.hasMinMax = false;
}
/**
* Builds a FieldStats with min and max value for the field.
* @param type The native type of this FieldStats
* @param maxDoc Max number of docs
* @param docCount the number of documents that have at least one term for this field,
* or -1 if this information isn't available for this field.
* @param sumDocFreq the sum of {@link TermsEnum#docFreq()} for all terms in this field,
* or -1 if this information isn't available for this field.
* @param sumTotalTermFreq the sum of {@link TermsEnum#totalTermFreq} for all terms in this field,
* or -1 if this measure isn't available for this field.
* @param isSearchable true if this field is searchable
* @param isAggregatable true if this field is aggregatable
* @param minValue the minimum value indexed in this field
* @param maxValue the maximum value indexed in this field
*/
FieldStats(byte type,
long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable, T minValue, T maxValue) {
Objects.requireNonNull(minValue, "minValue must not be null");
Objects.requireNonNull(maxValue, "maxValue must not be null");
this.type = type;
this.maxDoc = maxDoc;
this.docCount = docCount;
this.sumDocFreq = sumDocFreq;
this.sumTotalTermFreq = sumTotalTermFreq;
this.isSearchable = isSearchable;
this.isAggregatable = isAggregatable;
this.hasMinMax = true;
this.minValue = minValue;
this.maxValue = maxValue;
}
byte getType() {
return this.type;
}
public String getDisplayType() {
switch (type) {
case 0:
return "integer";
case 1:
return "float";
case 2:
return "date";
case 3:
return "string";
case 4:
return "ip";
case 5:
return "geo_point";
default:
throw new IllegalArgumentException("Unknown type.");
}
}
/**
* @return true if min/max information is available for this field
*/
public boolean hasMinMax() {
return hasMinMax;
}
/**
* @return the total number of documents.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public long getMaxDoc() {
return maxDoc;
}
/**
* @return the number of documents that have at least one term for this field,
* or -1 if this measurement isn't available.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public long getDocCount() {
return docCount;
}
/**
* @return The percentage of documents that have at least one value for this field.
*
* This is a derived statistic and is based on: 'doc_count / max_doc'
*/
public int getDensity() {
if (docCount < 0 || maxDoc <= 0) {
return -1;
}
return (int) (docCount * 100 / maxDoc);
}
/**
* @return the sum of each term's document frequency in this field, or -1 if this measurement isn't available.
* Document frequency is the number of documents containing a particular term.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public long getSumDocFreq() {
return sumDocFreq;
}
/**
* @return the sum of the term frequencies of all terms in this field across all documents,
* or -1 if this measurement
* isn't available. Term frequency is the total number of occurrences of a term in a particular document and field.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public long getSumTotalTermFreq() {
return sumTotalTermFreq;
}
/**
* @return <code>true</code> if any instance of the field name is searchable.
*/
public boolean isSearchable() {
return isSearchable;
}
/**
* @return <code>true</code> if any instance of the field name is aggregatable.
*/
public boolean isAggregatable() {
return isAggregatable;
}
/**
* @return the lowest value in the field.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public T getMinValue() {
return minValue;
}
/**
* @return the highest value in the field.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public T getMaxValue() {
return maxValue;
}
/**
* @return the lowest value in the field represented as a string.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public abstract String getMinValueAsString();
/**
* @return the highest value in the field represented as a string.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public abstract String getMaxValueAsString();
/**
* @param value The string to be parsed
* @param optionalFormat A string describing how to parse the specified value. Whether this parameter is supported
* depends on the implementation. If optionalFormat is specified and the implementation
* doesn't support it an {@link UnsupportedOperationException} is thrown
*/
protected abstract T valueOf(String value, String optionalFormat);
/**
* Accumulates the provided stats into this stats instance.
*/
public final void accumulate(FieldStats other) {
this.maxDoc += other.maxDoc;
if (other.docCount == -1) {
this.docCount = -1;
} else if (this.docCount != -1) {
this.docCount += other.docCount;
}
if (other.sumDocFreq == -1) {
this.sumDocFreq = -1;
} else if (this.sumDocFreq != -1) {
this.sumDocFreq += other.sumDocFreq;
}
if (other.sumTotalTermFreq == -1) {
this.sumTotalTermFreq = -1;
} else if (this.sumTotalTermFreq != -1) {
this.sumTotalTermFreq += other.sumTotalTermFreq;
}
isSearchable |= other.isSearchable;
isAggregatable |= other.isAggregatable;
assert type == other.getType();
if (hasMinMax && other.hasMinMax) {
updateMinMax((T) other.minValue, (T) other.maxValue);
} else {
hasMinMax = false;
minValue = null;
maxValue = null;
}
}
protected void updateMinMax(T min, T max) {
if (compare(minValue, min) > 0) {
minValue = min;
}
if (compare(maxValue, max) < 0) {
maxValue = max;
}
}
protected abstract int compare(T o1, T o2);
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TYPE_FIELD, getDisplayType());
builder.field(MAX_DOC_FIELD, maxDoc);
builder.field(DOC_COUNT_FIELD, docCount);
builder.field(DENSITY_FIELD, getDensity());
builder.field(SUM_DOC_FREQ_FIELD, sumDocFreq);
builder.field(SUM_TOTAL_TERM_FREQ_FIELD, sumTotalTermFreq);
builder.field(SEARCHABLE_FIELD, isSearchable);
builder.field(AGGREGATABLE_FIELD, isAggregatable);
if (hasMinMax) {
toInnerXContent(builder);
}
builder.endObject();
return builder;
}
protected void toInnerXContent(XContentBuilder builder) throws IOException {
builder.field(MIN_VALUE_FIELD, getMinValue());
builder.field(MIN_VALUE_AS_STRING_FIELD, getMinValueAsString());
builder.field(MAX_VALUE_FIELD, getMaxValue());
builder.field(MAX_VALUE_AS_STRING_FIELD, getMaxValueAsString());
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeByte(type);
out.writeLong(maxDoc);
out.writeLong(docCount);
out.writeLong(sumDocFreq);
out.writeLong(sumTotalTermFreq);
out.writeBoolean(isSearchable);
out.writeBoolean(isAggregatable);
if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeBoolean(hasMinMax);
if (hasMinMax) {
writeMinMax(out);
}
} else {
assert hasMinMax : "cannot serialize null min/max fieldstats in a mixed-cluster " +
"with pre-" + Version.V_5_2_0 + " nodes, remote version [" + out.getVersion() + "]";
writeMinMax(out);
}
}
protected abstract void writeMinMax(StreamOutput out) throws IOException;
/**
* @return <code>true</code> if this instance matches the provided index constraint,
* otherwise <code>false</code>
*/
public boolean match(IndexConstraint constraint) {
if (hasMinMax == false) {
return false;
}
int cmp;
T value = valueOf(constraint.getValue(), constraint.getOptionalFormat());
if (constraint.getProperty() == IndexConstraint.Property.MIN) {
cmp = compare(minValue, value);
} else if (constraint.getProperty() == IndexConstraint.Property.MAX) {
cmp = compare(maxValue, value);
} else {
throw new IllegalArgumentException("Unsupported property [" + constraint.getProperty() + "]");
}
switch (constraint.getComparison()) {
case GT:
return cmp > 0;
case GTE:
return cmp >= 0;
case LT:
return cmp < 0;
case LTE:
return cmp <= 0;
default:
throw new IllegalArgumentException("Unsupported comparison [" + constraint.getComparison() + "]");
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FieldStats<?> that = (FieldStats<?>) o;
if (type != that.type) return false;
if (maxDoc != that.maxDoc) return false;
if (docCount != that.docCount) return false;
if (sumDocFreq != that.sumDocFreq) return false;
if (sumTotalTermFreq != that.sumTotalTermFreq) return false;
if (isSearchable != that.isSearchable) return false;
if (isAggregatable != that.isAggregatable) return false;
if (hasMinMax != that.hasMinMax) return false;
if (hasMinMax == false) {
return true;
}
if (!minValue.equals(that.minValue)) return false;
return maxValue.equals(that.maxValue);
}
@Override
public int hashCode() {
return Objects.hash(type, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
hasMinMax, minValue, maxValue);
}
public static class Long extends FieldStats<java.lang.Long> {
public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
public Long(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
long minValue, long maxValue) {
super((byte) 0, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, minValue, maxValue);
}
@Override
public int compare(java.lang.Long o1, java.lang.Long o2) {
return o1.compareTo(o2);
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
out.writeLong(minValue);
out.writeLong(maxValue);
}
@Override
public java.lang.Long valueOf(String value, String optionalFormat) {
return java.lang.Long.parseLong(value);
}
@Override
public String getMinValueAsString() {
return java.lang.Long.toString(minValue);
}
@Override
public String getMaxValueAsString() {
return java.lang.Long.toString(maxValue);
}
}
public static class Double extends FieldStats<java.lang.Double> {
public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable);
}
public Double(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
double minValue, double maxValue) {
super((byte) 1, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
minValue, maxValue);
}
@Override
public int compare(java.lang.Double o1, java.lang.Double o2) {
return o1.compareTo(o2);
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
out.writeDouble(minValue);
out.writeDouble(maxValue);
}
@Override
public java.lang.Double valueOf(String value, String optionalFormat) {
if (optionalFormat != null) {
throw new UnsupportedOperationException("custom format isn't supported");
}
return java.lang.Double.parseDouble(value);
}
@Override
public String getMinValueAsString() {
return java.lang.Double.toString(minValue);
}
@Override
public String getMaxValueAsString() {
return java.lang.Double.toString(maxValue);
}
}
public static class Date extends FieldStats<java.lang.Long> {
private FormatDateTimeFormatter formatter;
public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable);
this.formatter = null;
}
public Date(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
FormatDateTimeFormatter formatter,
long minValue, long maxValue) {
super((byte) 2, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
minValue, maxValue);
this.formatter = formatter;
}
@Override
public int compare(java.lang.Long o1, java.lang.Long o2) {
return o1.compareTo(o2);
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
out.writeString(formatter.format());
out.writeLong(minValue);
out.writeLong(maxValue);
}
@Override
public java.lang.Long valueOf(String value, String fmt) {
FormatDateTimeFormatter f = formatter;
if (fmt != null) {
f = Joda.forPattern(fmt);
}
return f.parser().parseDateTime(value).getMillis();
}
@Override
public String getMinValueAsString() {
return formatter.printer().print(minValue);
}
@Override
public String getMaxValueAsString() {
return formatter.printer().print(maxValue);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
Date that = (Date) o;
return Objects.equals(formatter == null ? null : formatter.format(),
that.formatter == null ? null : that.formatter.format());
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (formatter == null ? 0 : formatter.format().hashCode());
return result;
}
}
public static class Text extends FieldStats<BytesRef> {
public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 3, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
public Text(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
BytesRef minValue, BytesRef maxValue) {
super((byte) 3, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable,
minValue, maxValue);
}
@Override
public int compare(BytesRef o1, BytesRef o2) {
return o1.compareTo(o2);
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
out.writeBytesRef(minValue);
out.writeBytesRef(maxValue);
}
@Override
protected BytesRef valueOf(String value, String optionalFormat) {
if (optionalFormat != null) {
throw new UnsupportedOperationException("custom format isn't supported");
}
return new BytesRef(value);
}
@Override
public String getMinValueAsString() {
return minValue.utf8ToString();
}
@Override
public String getMaxValueAsString() {
return maxValue.utf8ToString();
}
@Override
protected void toInnerXContent(XContentBuilder builder) throws IOException {
builder.field(MIN_VALUE_FIELD, getMinValueAsString());
builder.field(MAX_VALUE_FIELD, getMaxValueAsString());
}
}
public static class Ip extends FieldStats<InetAddress> {
public Ip(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
public Ip(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
InetAddress minValue, InetAddress maxValue) {
super((byte) 4, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable,
minValue, maxValue);
}
@Override
public int compare(InetAddress o1, InetAddress o2) {
byte[] b1 = InetAddressPoint.encode(o1);
byte[] b2 = InetAddressPoint.encode(o2);
return StringHelper.compare(b1.length, b1, 0, b2, 0);
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
byte[] b1 = InetAddressPoint.encode(minValue);
byte[] b2 = InetAddressPoint.encode(maxValue);
out.writeByte((byte) b1.length);
out.writeBytes(b1);
out.writeByte((byte) b2.length);
out.writeBytes(b2);
}
@Override
public InetAddress valueOf(String value, String fmt) {
return InetAddresses.forString(value);
}
@Override
public String getMinValueAsString() {
return NetworkAddress.format(minValue);
}
@Override
public String getMaxValueAsString() {
return NetworkAddress.format(maxValue);
}
}
public static class GeoPoint extends FieldStats<org.elasticsearch.common.geo.GeoPoint> {
public GeoPoint(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable) {
super((byte) 5, maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
public GeoPoint(long maxDoc, long docCount, long sumDocFreq, long sumTotalTermFreq,
boolean isSearchable, boolean isAggregatable,
org.elasticsearch.common.geo.GeoPoint minValue, org.elasticsearch.common.geo.GeoPoint maxValue) {
super((byte) 5, maxDoc, docCount, sumDocFreq, sumTotalTermFreq, isSearchable, isAggregatable,
minValue, maxValue);
}
@Override
public org.elasticsearch.common.geo.GeoPoint valueOf(String value, String fmt) {
return org.elasticsearch.common.geo.GeoPoint.parseFromLatLon(value);
}
@Override
protected void updateMinMax(org.elasticsearch.common.geo.GeoPoint min, org.elasticsearch.common.geo.GeoPoint max) {
minValue.reset(Math.min(min.lat(), minValue.lat()), Math.min(min.lon(), minValue.lon()));
maxValue.reset(Math.max(max.lat(), maxValue.lat()), Math.max(max.lon(), maxValue.lon()));
}
@Override
public int compare(org.elasticsearch.common.geo.GeoPoint p1, org.elasticsearch.common.geo.GeoPoint p2) {
throw new IllegalArgumentException("compare is not supported for geo_point field stats");
}
@Override
public void writeMinMax(StreamOutput out) throws IOException {
out.writeDouble(minValue.lat());
out.writeDouble(minValue.lon());
out.writeDouble(maxValue.lat());
out.writeDouble(maxValue.lon());
}
@Override
public String getMinValueAsString() {
return minValue.toString();
}
@Override
public String getMaxValueAsString() {
return maxValue.toString();
}
}
public static FieldStats readFrom(StreamInput in) throws IOException {
byte type = in.readByte();
long maxDoc = in.readLong();
long docCount = in.readLong();
long sumDocFreq = in.readLong();
long sumTotalTermFreq = in.readLong();
boolean isSearchable = in.readBoolean();
boolean isAggregatable = in.readBoolean();
boolean hasMinMax = true;
if (in.getVersion().onOrAfter(Version.V_5_2_0)) {
hasMinMax = in.readBoolean();
}
switch (type) {
case 0:
if (hasMinMax) {
return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, in.readLong(), in.readLong());
} else {
return new Long(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
case 1:
if (hasMinMax) {
return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, in.readDouble(), in.readDouble());
} else {
return new Double(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
case 2:
if (hasMinMax) {
FormatDateTimeFormatter formatter = Joda.forPattern(in.readString());
return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, formatter, in.readLong(), in.readLong());
} else {
return new Date(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
case 3:
if (hasMinMax) {
return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, in.readBytesRef(), in.readBytesRef());
} else {
return new Text(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
case 4: {
if (hasMinMax == false) {
return new Ip(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
int l1 = in.readByte();
byte[] b1 = new byte[l1];
in.readBytes(b1, 0, l1);
int l2 = in.readByte();
byte[] b2 = new byte[l2];
in.readBytes(b2, 0, l2);
InetAddress min = InetAddressPoint.decode(b1);
InetAddress max = InetAddressPoint.decode(b2);
return new Ip(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, min, max);
}
case 5: {
if (hasMinMax == false) {
return new GeoPoint(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable);
}
org.elasticsearch.common.geo.GeoPoint min = new org.elasticsearch.common.geo.GeoPoint(in.readDouble(), in.readDouble());
org.elasticsearch.common.geo.GeoPoint max = new org.elasticsearch.common.geo.GeoPoint(in.readDouble(), in.readDouble());
return new GeoPoint(maxDoc, docCount, sumDocFreq, sumTotalTermFreq,
isSearchable, isAggregatable, min, max);
}
default:
throw new IllegalArgumentException("Unknown type.");
}
}
private static final String TYPE_FIELD = "type";
private static final String MAX_DOC_FIELD = "max_doc";
private static final String DOC_COUNT_FIELD = "doc_count";
private static final String DENSITY_FIELD = "density";
private static final String SUM_DOC_FREQ_FIELD = "sum_doc_freq";
private static final String SUM_TOTAL_TERM_FREQ_FIELD = "sum_total_term_freq";
private static final String SEARCHABLE_FIELD = "searchable";
private static final String AGGREGATABLE_FIELD = "aggregatable";
private static final String MIN_VALUE_FIELD = "min_value";
private static final String MIN_VALUE_AS_STRING_FIELD = "min_value_as_string";
private static final String MAX_VALUE_FIELD = "max_value";
private static final String MAX_VALUE_AS_STRING_FIELD = "max_value_as_string";
}
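A note on the merge semantics above: accumulate() treats -1 as a sticky "statistic unavailable" sentinel for docCount, sumDocFreq and sumTotalTermFreq (maxDoc always sums), and getDensity() derives doc_count * 100 / max_doc. A minimal, self-contained sketch of those two rules, with no Elasticsearch dependencies (class and method names here are illustrative, not part of the removed API):

// Illustrative re-statement of FieldStats#accumulate's sentinel merge
// and of FieldStats#getDensity.
final class StatMergeSketch {
    // -1 means "not available"; once either side is -1, the merge is -1.
    static long merge(long a, long b) {
        return (a == -1 || b == -1) ? -1 : a + b;
    }

    // density = doc_count * 100 / max_doc, or -1 when inputs are unusable.
    static int density(long docCount, long maxDoc) {
        if (docCount < 0 || maxDoc <= 0) {
            return -1;
        }
        return (int) (docCount * 100 / maxDoc);
    }

    public static void main(String[] args) {
        System.out.println(merge(120, -1));    // -1: one shard lacked the stat
        System.out.println(merge(40, 60));     // 100
        System.out.println(density(100, 200)); // 50
    }
}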

File: org.elasticsearch.action.fieldstats.FieldStatsAction (deleted)

@@ -1,43 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
public class FieldStatsAction extends Action<FieldStatsRequest, FieldStatsResponse, FieldStatsRequestBuilder> {
public static final FieldStatsAction INSTANCE = new FieldStatsAction();
public static final String NAME = "indices:data/read/field_stats";
private FieldStatsAction() {
super(NAME);
}
@Override
public FieldStatsResponse newResponse() {
return new FieldStatsResponse();
}
@Override
public FieldStatsRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new FieldStatsRequestBuilder(client, this);
}
}
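The singleton/NAME pair above is what made the action addressable through the generic client plumbing as well as the dedicated builder. A sketch of the generic invocation, assuming an ElasticsearchClient named client and an illustrative field name (this mirrors the removed AbstractClient methods shown further down):

// Sketch: executing the action through the generic execute() entry point.
FieldStatsRequest request = new FieldStatsRequest();
request.setFields(new String[] {"rating"}); // illustrative field name
ActionFuture<FieldStatsResponse> future =
        client.execute(FieldStatsAction.INSTANCE, request);
FieldStatsResponse response = future.actionGet();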

File: org.elasticsearch.action.fieldstats.FieldStatsRequest (deleted)

@@ -1,207 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {
public static final String DEFAULT_LEVEL = "cluster";
private String[] fields = Strings.EMPTY_ARRAY;
private String level = DEFAULT_LEVEL;
private IndexConstraint[] indexConstraints = new IndexConstraint[0];
private boolean useCache = true;
public String[] getFields() {
return fields;
}
public void setFields(String[] fields) {
if (fields == null) {
throw new NullPointerException("specified fields can't be null");
}
this.fields = fields;
}
public void setUseCache(boolean useCache) {
this.useCache = useCache;
}
public boolean shouldUseCache() {
return useCache;
}
public IndexConstraint[] getIndexConstraints() {
return indexConstraints;
}
public void setIndexConstraints(IndexConstraint[] indexConstraints) {
if (indexConstraints == null) {
throw new NullPointerException("specified index_constraints can't be null");
}
this.indexConstraints = indexConstraints;
}
public void source(XContentParser parser) throws IOException {
List<IndexConstraint> indexConstraints = new ArrayList<>();
List<String> fields = new ArrayList<>();
String fieldName = null;
Token token = parser.nextToken();
assert token == Token.START_OBJECT;
for (token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
switch (token) {
case FIELD_NAME:
fieldName = parser.currentName();
break;
case START_OBJECT:
if ("index_constraints".equals(fieldName)) {
parseIndexConstraints(indexConstraints, parser);
} else {
throw new IllegalArgumentException("unknown field [" + fieldName + "]");
}
break;
case START_ARRAY:
if ("fields".equals(fieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token.isValue()) {
fields.add(parser.text());
} else {
throw new IllegalArgumentException("unexpected token [" + token + "]");
}
}
} else {
throw new IllegalArgumentException("unknown field [" + fieldName + "]");
}
break;
default:
throw new IllegalArgumentException("unexpected token [" + token + "]");
}
}
this.fields = fields.toArray(new String[fields.size()]);
this.indexConstraints = indexConstraints.toArray(new IndexConstraint[indexConstraints.size()]);
}
private static void parseIndexConstraints(List<IndexConstraint> indexConstraints,
XContentParser parser) throws IOException {
Token token = parser.currentToken();
assert token == Token.START_OBJECT;
String field = null;
String currentName = null;
for (token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
if (token == Token.FIELD_NAME) {
field = currentName = parser.currentName();
} else if (token == Token.START_OBJECT) {
for (Token fieldToken = parser.nextToken();
fieldToken != Token.END_OBJECT; fieldToken = parser.nextToken()) {
if (fieldToken == Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (fieldToken == Token.START_OBJECT) {
IndexConstraint.Property property = IndexConstraint.Property.parse(currentName);
String value = null;
String optionalFormat = null;
IndexConstraint.Comparison comparison = null;
for (Token propertyToken = parser.nextToken();
propertyToken != Token.END_OBJECT; propertyToken = parser.nextToken()) {
if (propertyToken.isValue()) {
if ("format".equals(parser.currentName())) {
optionalFormat = parser.text();
} else {
comparison = IndexConstraint.Comparison.parse(parser.currentName());
value = parser.text();
}
} else {
if (propertyToken != Token.FIELD_NAME) {
throw new IllegalArgumentException("unexpected token [" + propertyToken + "]");
}
}
}
indexConstraints.add(new IndexConstraint(field, property, comparison, value, optionalFormat));
} else {
throw new IllegalArgumentException("unexpected token [" + fieldToken + "]");
}
}
} else {
throw new IllegalArgumentException("unexpected token [" + token + "]");
}
}
}
public String level() {
return level;
}
public void level(String level) {
this.level = level;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
if ("cluster".equals(level) == false && "indices".equals(level) == false) {
validationException =
ValidateActions.addValidationError("invalid level option [" + level + "]", validationException);
}
if (fields == null || fields.length == 0) {
validationException = ValidateActions.addValidationError("no fields specified", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
fields = in.readStringArray();
int size = in.readVInt();
indexConstraints = new IndexConstraint[size];
for (int i = 0; i < size; i++) {
indexConstraints[i] = new IndexConstraint(in);
}
level = in.readString();
useCache = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(fields);
out.writeVInt(indexConstraints.length);
for (IndexConstraint indexConstraint : indexConstraints) {
out.writeString(indexConstraint.getField());
out.writeByte(indexConstraint.getProperty().getId());
out.writeByte(indexConstraint.getComparison().getId());
out.writeString(indexConstraint.getValue());
out.writeOptionalString(indexConstraint.getOptionalFormat());
}
out.writeString(level);
out.writeBoolean(useCache);
}
}
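For reference, source(XContentParser) above accepts a body of the shape {"fields": [...], "index_constraints": {field: {min_value|max_value: {lt|lte|gt|gte: value, "format": ...}}}}. The same request can be built programmatically from the classes in this package; a sketch with an illustrative field name and date format:

// Sketch: programmatic equivalent of the body
// {"fields": ["creation_date"],
//  "index_constraints": {"creation_date":
//    {"max_value": {"gte": "2014-01-01", "format": "yyyy-MM-dd"}}}}
FieldStatsRequest request = new FieldStatsRequest();
request.setFields(new String[] {"creation_date"});
request.setIndexConstraints(new IndexConstraint[] {
    new IndexConstraint("creation_date", IndexConstraint.Property.MAX,
        IndexConstraint.Comparison.GTE, "2014-01-01", "yyyy-MM-dd")
});
request.level("indices"); // per-index results; the default is "cluster"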

File: org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder (deleted)

@@ -1,51 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
public class FieldStatsRequestBuilder extends
BroadcastOperationRequestBuilder<FieldStatsRequest, FieldStatsResponse, FieldStatsRequestBuilder> {
public FieldStatsRequestBuilder(ElasticsearchClient client, FieldStatsAction action) {
super(client, action, new FieldStatsRequest());
}
public FieldStatsRequestBuilder setFields(String... fields) {
request().setFields(fields);
return this;
}
public FieldStatsRequestBuilder setIndexConstraints(IndexConstraint... constraints) {
request().setIndexConstraints(constraints);
return this;
}
public FieldStatsRequestBuilder setLevel(String level) {
request().level(level);
return this;
}
public FieldStatsRequestBuilder setUseCache(boolean useCache) {
request().setUseCache(useCache);
return this;
}
}
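In practice this builder was reached through the deprecated Client#prepareFieldStats() shown further down in Client.java. A usage sketch with illustrative names:

// Sketch: typical builder usage prior to this removal.
FieldStatsResponse response = client.prepareFieldStats()
        .setFields("rating")
        .setLevel("indices")
        .get();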

File: org.elasticsearch.action.fieldstats.FieldStatsResponse (deleted)

@@ -1,118 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.Version;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class FieldStatsResponse extends BroadcastResponse {
private Map<String, Map<String, FieldStats>> indicesMergedFieldStats;
private Map<String, String> conflicts;
public FieldStatsResponse() {
}
public FieldStatsResponse(int totalShards, int successfulShards, int failedShards,
List<ShardOperationFailedException> shardFailures,
Map<String, Map<String, FieldStats>> indicesMergedFieldStats,
Map<String, String> conflicts) {
super(totalShards, successfulShards, failedShards, shardFailures);
this.indicesMergedFieldStats = indicesMergedFieldStats;
this.conflicts = conflicts;
}
@Nullable
public Map<String, FieldStats> getAllFieldStats() {
return indicesMergedFieldStats.get("_all");
}
public Map<String, String> getConflicts() {
return conflicts;
}
public Map<String, Map<String, FieldStats>> getIndicesMergedFieldStats() {
return indicesMergedFieldStats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
indicesMergedFieldStats = new HashMap<>(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
int indexSize = in.readVInt();
Map<String, FieldStats> indexFieldStats = new HashMap<>(indexSize);
indicesMergedFieldStats.put(key, indexFieldStats);
for (int j = 0; j < indexSize; j++) {
key = in.readString();
FieldStats value = FieldStats.readFrom(in);
indexFieldStats.put(key, value);
}
}
size = in.readVInt();
conflicts = new HashMap<>(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
String value = in.readString();
conflicts.put(key, value);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(indicesMergedFieldStats.size());
for (Map.Entry<String, Map<String, FieldStats>> entry1 : indicesMergedFieldStats.entrySet()) {
out.writeString(entry1.getKey());
int size = entry1.getValue().size();
if (out.getVersion().before(Version.V_5_2_0)) {
// filter fieldstats without min/max information
for (FieldStats stats : entry1.getValue().values()) {
if (stats.hasMinMax() == false) {
size--;
}
}
}
out.writeVInt(size);
for (Map.Entry<String, FieldStats> entry2 : entry1.getValue().entrySet()) {
if (entry2.getValue().hasMinMax() || out.getVersion().onOrAfter(Version.V_5_2_0)) {
out.writeString(entry2.getKey());
entry2.getValue().writeTo(out);
}
}
}
out.writeVInt(conflicts.size());
for (Map.Entry<String, String> entry : conflicts.entrySet()) {
out.writeString(entry.getKey());
out.writeString(entry.getValue());
}
}
}
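At the default "cluster" level the per-index results are merged under the single key "_all", which is what getAllFieldStats() returns. A sketch of consuming such a response (field name illustrative):

// Sketch: reading a cluster-level response.
Map<String, FieldStats> all = response.getAllFieldStats();
FieldStats ratingStats = all.get("rating");
if (ratingStats != null && ratingStats.hasMinMax()) {
    System.out.println(ratingStats.getMinValueAsString()
            + " .. " + ratingStats.getMaxValueAsString());
}
System.out.println("type conflicts: " + response.getConflicts());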

File: org.elasticsearch.action.fieldstats.FieldStatsShardRequest (deleted)

@@ -1,72 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class FieldStatsShardRequest extends BroadcastShardRequest {
private String[] fields;
private boolean useCache;
public FieldStatsShardRequest() {
}
public FieldStatsShardRequest(ShardId shardId, FieldStatsRequest request) {
super(shardId, request);
Set<String> fields = new HashSet<>(Arrays.asList(request.getFields()));
for (IndexConstraint indexConstraint : request.getIndexConstraints()) {
fields.add(indexConstraint.getField());
}
this.fields = fields.toArray(new String[fields.size()]);
useCache = request.shouldUseCache();
}
public String[] getFields() {
return fields;
}
public boolean shouldUseCache() {
return useCache;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
fields = in.readStringArray();
useCache = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(fields);
out.writeBoolean(useCache);
}
}

File: org.elasticsearch.action.fieldstats.FieldStatsShardResponse (deleted)

@@ -1,85 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
public class FieldStatsShardResponse extends BroadcastShardResponse {
private Map<String, FieldStats<?>> fieldStats;
public FieldStatsShardResponse() {
}
public FieldStatsShardResponse(ShardId shardId, Map<String, FieldStats<?>> fieldStats) {
super(shardId);
this.fieldStats = fieldStats;
}
public Map<String, FieldStats<?>> getFieldStats() {
return fieldStats;
}
Map<String, FieldStats<?>> filterNullMinMax() {
return fieldStats.entrySet().stream()
.filter((e) -> e.getValue().hasMinMax())
.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
final int size = in.readVInt();
fieldStats = new HashMap<>(size);
for (int i = 0; i < size; i++) {
String key = in.readString();
FieldStats value = FieldStats.readFrom(in);
fieldStats.put(key, value);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
final Map<String, FieldStats<?>> stats;
if (out.getVersion().before(Version.V_5_2_0)) {
// FieldStats with null min/max are not (de)serializable to nodes before Version#V_5_2_0
stats = filterNullMinMax();
} else {
stats = getFieldStats();
}
out.writeVInt(stats.size());
for (Map.Entry<String, FieldStats<?>> entry : stats.entrySet()) {
out.writeString(entry.getKey());
entry.getValue().writeTo(out);
}
}
}

File: org.elasticsearch.action.fieldstats.IndexConstraint (deleted)

@@ -1,183 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.common.io.stream.StreamInput;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public class IndexConstraint {
private final String field;
private final Property property;
private final Comparison comparison;
private final String value;
private final String optionalFormat;
IndexConstraint(StreamInput input) throws IOException {
this.field = input.readString();
this.property = Property.read(input.readByte());
this.comparison = Comparison.read(input.readByte());
this.value = input.readString();
this.optionalFormat = input.readOptionalString();
}
public IndexConstraint(String field, Property property, Comparison comparison, String value) {
this(field, property, comparison, value, null);
}
public IndexConstraint(String field, Property property,
Comparison comparison, String value, String optionalFormat) {
this.field = Objects.requireNonNull(field);
this.property = Objects.requireNonNull(property);
this.comparison = Objects.requireNonNull(comparison);
this.value = Objects.requireNonNull(value);
this.optionalFormat = optionalFormat;
}
/**
* @return The field the constraint applies to
*/
public String getField() {
return field;
}
/**
* @return How to compare the specified value against the field property (lt, lte, gt and gte)
*/
public Comparison getComparison() {
return comparison;
}
/**
* @return The field property the constraint applies to (min or max value)
*/
public Property getProperty() {
return property;
}
/**
* @return The value to compare against
*/
public String getValue() {
return value;
}
/**
* @return An optional format that specifies how the value string is converted into the native value of the field.
* Not all field types support this; currently only date fields do.
*/
public String getOptionalFormat() {
return optionalFormat;
}
public enum Property {
MIN((byte) 0),
MAX((byte) 1);
private final byte id;
Property(byte id) {
this.id = id;
}
public byte getId() {
return id;
}
public static Property read(byte id) {
switch (id) {
case 0:
return MIN;
case 1:
return MAX;
default:
throw new IllegalArgumentException("Unknown property [" + id + "]");
}
}
public static Property parse(String value) {
value = value.toLowerCase(Locale.ROOT);
switch (value) {
case "min_value":
return MIN;
case "max_value":
return MAX;
default:
throw new IllegalArgumentException("Unknown property [" + value + "]");
}
}
}
public enum Comparison {
LT((byte) 0),
LTE((byte) 1),
GT((byte) 2),
GTE((byte) 3);
private final byte id;
Comparison(byte id) {
this.id = id;
}
public byte getId() {
return id;
}
public static Comparison read(byte id) {
switch (id) {
case 0:
return LT;
case 1:
return LTE;
case 2:
return GT;
case 3:
return GTE;
default:
throw new IllegalArgumentException("Unknown comparison [" + id + "]");
}
}
public static Comparison parse(String value) {
value = value.toLowerCase(Locale.ROOT);
switch (value) {
case "lt":
return LT;
case "lte":
return LTE;
case "gt":
return GT;
case "gte":
return GTE;
default:
throw new IllegalArgumentException("Unknown comparison [" + value + "]");
}
}
}
}
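Both enums round-trip a wire id as well as the REST-body spelling; a small sketch of the accepted forms:

// Sketch: spellings accepted by Property.parse and Comparison.parse.
IndexConstraint.Property property = IndexConstraint.Property.parse("max_value"); // MAX
IndexConstraint.Comparison comparison = IndexConstraint.Comparison.parse("gte"); // GTE
assert IndexConstraint.Comparison.read(comparison.getId()) == comparison;        // id round-trip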

File: org.elasticsearch.action.fieldstats.TransportFieldStatsAction (deleted)

@@ -1,223 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReferenceArray;
public class TransportFieldStatsAction extends
TransportBroadcastAction<FieldStatsRequest, FieldStatsResponse, FieldStatsShardRequest, FieldStatsShardResponse> {
private final IndicesService indicesService;
@Inject
public TransportFieldStatsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
IndicesService indicesService) {
super(settings, FieldStatsAction.NAME, threadPool, clusterService, transportService,
actionFilters, indexNameExpressionResolver, FieldStatsRequest::new,
FieldStatsShardRequest::new, ThreadPool.Names.MANAGEMENT);
this.indicesService = indicesService;
}
@Override
protected FieldStatsResponse newResponse(FieldStatsRequest request, AtomicReferenceArray shardsResponses,
ClusterState clusterState) {
int successfulShards = 0;
int failedShards = 0;
Map<String, String> conflicts = new HashMap<>();
Map<String, Map<String, FieldStats>> indicesMergedFieldStats = new HashMap<>();
List<ShardOperationFailedException> shardFailures = new ArrayList<>();
for (int i = 0; i < shardsResponses.length(); i++) {
Object shardValue = shardsResponses.get(i);
if (shardValue == null) {
// simply ignore non-active shards
} else if (shardValue instanceof BroadcastShardOperationFailedException) {
failedShards++;
shardFailures.add(
new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardValue)
);
} else {
successfulShards++;
FieldStatsShardResponse shardResponse = (FieldStatsShardResponse) shardValue;
final String indexName;
if ("cluster".equals(request.level())) {
indexName = "_all";
} else if ("indices".equals(request.level())) {
indexName = shardResponse.getIndex();
} else {
// should already have been caught by the FieldStatsRequest#validate(...)
throw new IllegalArgumentException("Illegal level option [" + request.level() + "]");
}
Map<String, FieldStats> indexMergedFieldStats = indicesMergedFieldStats.get(indexName);
if (indexMergedFieldStats == null) {
indicesMergedFieldStats.put(indexName, indexMergedFieldStats = new HashMap<>());
}
Map<String, FieldStats<?>> fieldStats = shardResponse.getFieldStats();
for (Map.Entry<String, FieldStats<?>> entry : fieldStats.entrySet()) {
FieldStats<?> existing = indexMergedFieldStats.get(entry.getKey());
if (existing != null) {
if (existing.getType() != entry.getValue().getType()) {
if (conflicts.containsKey(entry.getKey()) == false) {
FieldStats[] fields = new FieldStats[] {entry.getValue(), existing};
Arrays.sort(fields, (o1, o2) -> Byte.compare(o1.getType(), o2.getType()));
conflicts.put(entry.getKey(),
"Field [" + entry.getKey() + "] of type [" +
fields[0].getDisplayType() +
"] conflicts with existing field of type [" +
fields[1].getDisplayType() +
"] in other index.");
}
} else {
existing.accumulate(entry.getValue());
}
} else {
indexMergedFieldStats.put(entry.getKey(), entry.getValue());
}
}
}
// Remove fields that have type conflicts across indices.
for (String conflictKey : conflicts.keySet()) {
Iterator<Map.Entry<String, Map<String, FieldStats>>> iterator =
indicesMergedFieldStats.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Map<String, FieldStats>> entry = iterator.next();
if (entry.getValue().containsKey(conflictKey)) {
entry.getValue().remove(conflictKey);
}
}
}
}
if (request.getIndexConstraints().length != 0) {
Set<String> fieldStatFields = new HashSet<>(Arrays.asList(request.getFields()));
for (IndexConstraint indexConstraint : request.getIndexConstraints()) {
Iterator<Map.Entry<String, Map<String, FieldStats>>> iterator =
indicesMergedFieldStats.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, Map<String, FieldStats>> entry = iterator.next();
FieldStats indexConstraintFieldStats = entry.getValue().get(indexConstraint.getField());
if (indexConstraintFieldStats != null && indexConstraintFieldStats.match(indexConstraint)) {
// If the field stats didn't occur in the list of fields in the original request
// we need to remove the field stats, because it was never requested and was only needed to
// validate the index constraint.
if (fieldStatFields.contains(indexConstraint.getField()) == false) {
entry.getValue().remove(indexConstraint.getField());
}
} else {
// The index constraint didn't match (or the field has no stats in this index),
// so we remove all the field stats of the index we're checking.
iterator.remove();
}
}
}
}
return new FieldStatsResponse(shardsResponses.length(), successfulShards, failedShards,
shardFailures, indicesMergedFieldStats, conflicts);
}
@Override
protected FieldStatsShardRequest newShardRequest(int numShards, ShardRouting shard, FieldStatsRequest request) {
return new FieldStatsShardRequest(shard.shardId(), request);
}
@Override
protected FieldStatsShardResponse newShardResponse() {
return new FieldStatsShardResponse();
}
@Override
protected FieldStatsShardResponse shardOperation(FieldStatsShardRequest request) {
ShardId shardId = request.shardId();
Map<String, FieldStats<?>> fieldStats = new HashMap<>();
IndexService indexServices = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard shard = indexServices.getShard(shardId.id());
try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) {
// Resolve patterns and deduplicate
Set<String> fieldNames = new HashSet<>();
for (String field : request.getFields()) {
fieldNames.addAll(shard.mapperService().simpleMatchToIndexNames(field));
}
for (String field : fieldNames) {
FieldStats<?> stats = indicesService.getFieldStats(shard, searcher, field, request.shouldUseCache());
if (stats != null) {
fieldStats.put(field, stats);
}
}
} catch (Exception e) {
throw ExceptionsHelper.convertToElastic(e);
}
return new FieldStatsShardResponse(shardId, fieldStats);
}
@Override
protected GroupShardsIterator shards(ClusterState clusterState, FieldStatsRequest request,
String[] concreteIndices) {
return clusterService.operationRouting().searchShards(clusterState, concreteIndices, null, null);
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, FieldStatsRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, FieldStatsRequest request,
String[] concreteIndices) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
}
}
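For readers tracing the removed merge logic above, here is a minimal standalone sketch of the index-constraint pruning rule it implemented; ConstraintPruner, statsByIndex, and the long-valued stats map are illustrative stand-ins, not the removed types:

import java.util.Iterator;
import java.util.Map;
import java.util.Set;

final class ConstraintPruner {
    // Mirrors the removed behaviour: an index whose stats fail a constraint is
    // dropped entirely, and a field fetched only to evaluate a constraint is
    // stripped from the surviving results.
    static void prune(Map<String, Map<String, Long>> statsByIndex,
                      String constraintField, long minAllowed,
                      Set<String> requestedFields) {
        Iterator<Map.Entry<String, Map<String, Long>>> it = statsByIndex.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Map<String, Long>> entry = it.next();
            Long value = entry.getValue().get(constraintField);
            if (value != null && value >= minAllowed) {
                if (requestedFields.contains(constraintField) == false) {
                    entry.getValue().remove(constraintField); // only fetched for validation
                }
            } else {
                it.remove(); // constraint failed or field absent: drop the index
            }
        }
    }
}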

View File

@ -30,13 +30,9 @@ import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainRequestBuilder;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.fieldstats.FieldStatsRequest;
import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
@ -456,24 +452,6 @@ public interface Client extends ElasticsearchClient, Releasable {
*/
void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener);
/**
* @deprecated Use _field_caps instead or run min/max aggregations on the desired fields
*/
@Deprecated
FieldStatsRequestBuilder prepareFieldStats();
/**
* @deprecated Use _field_caps instead or run min/max aggregations on the desired fields
*/
@Deprecated
ActionFuture<FieldStatsResponse> fieldStats(FieldStatsRequest request);
/**
* @deprecated Use _field_caps instead or run min/max aggregations on the desired fields
*/
@Deprecated
void fieldStats(FieldStatsRequest request, ActionListener<FieldStatsResponse> listener);
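As the deprecation notes above suggest, min/max aggregations cover the most common field stats use case. A minimal migration sketch against the 5.x Java client; the index name "test" and field "value" are illustrative:

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.min.Min;

class FieldStatsMigration {
    // Rough replacement for client.prepareFieldStats().setFields("value")
    // when only min/max are needed.
    static void printMinMax(Client client) {
        SearchResponse response = client.prepareSearch("test")
                .setSize(0) // aggregations only, no hits
                .addAggregation(AggregationBuilders.min("min_value").field("value"))
                .addAggregation(AggregationBuilders.max("max_value").field("value"))
                .get();
        Min min = response.getAggregations().get("min_value");
        Max max = response.getAggregations().get("max_value");
        System.out.println(min.getValue() + " .. " + max.getValue());
    }
}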
/**
* Builder for the field capabilities request.
*/

View File

@ -280,10 +280,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.fieldstats.FieldStatsRequest;
import org.elasticsearch.action.fieldstats.FieldStatsRequestBuilder;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
@ -660,21 +656,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client
return new ClearScrollRequestBuilder(this, ClearScrollAction.INSTANCE);
}
@Override
public void fieldStats(FieldStatsRequest request, ActionListener<FieldStatsResponse> listener) {
execute(FieldStatsAction.INSTANCE, request, listener);
}
@Override
public ActionFuture<FieldStatsResponse> fieldStats(FieldStatsRequest request) {
return execute(FieldStatsAction.INSTANCE, request);
}
@Override
public FieldStatsRequestBuilder prepareFieldStats() {
return new FieldStatsRequestBuilder(this, FieldStatsAction.INSTANCE);
}
@Override
public void fieldCaps(FieldCapabilitiesRequest request, ActionListener<FieldCapabilitiesResponse> listener) {
execute(FieldCapabilitiesAction.INSTANCE, request, listener);
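A short usage sketch of the surviving replacement API, assuming a connected Client; the field name is illustrative:

import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.client.Client;

class FieldCapsUsage {
    // Field capabilities answer the type/searchable/aggregatable questions
    // that field stats used to answer, without per-index min/max values.
    static FieldCapabilitiesResponse capsFor(Client client, String field) {
        return client.fieldCaps(new FieldCapabilitiesRequest().fields(field)).actionGet();
    }
}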

View File

@ -19,19 +19,17 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.DateMathParser;
@ -54,6 +52,7 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
/** A {@link FieldMapper} for dates. */
@ -306,25 +305,6 @@ public class DateFieldMapper extends FieldMapper {
return dateParser.parse(strValue, context::nowInMillis, roundUp, zone);
}
@Override
public FieldStats.Date stats(IndexReader reader) throws IOException {
String field = name();
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
if (fi == null) {
return null;
}
long size = PointValues.size(reader, field);
if (size == 0) {
return new FieldStats.Date(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
}
int docCount = PointValues.getDocCount(reader, field);
byte[] min = PointValues.getMinPackedValue(reader, field);
byte[] max = PointValues.getMaxPackedValue(reader, field);
return new FieldStats.Date(reader.maxDoc(), docCount, -1L, size,
isSearchable(), isAggregatable(),
dateTimeFormatter(), LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}
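The Lucene primitives used by the removed method remain available; a minimal sketch of recovering the same min/max epoch millis for a date field (the class and method names are illustrative):

import java.io.IOException;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PointValues;

class DateRangeSketch {
    // Returns {min, max} epoch millis from the field's point index,
    // or null when no values are indexed.
    static long[] minMaxMillis(IndexReader reader, String field) throws IOException {
        if (PointValues.size(reader, field) == 0) {
            return null;
        }
        long min = LongPoint.decodeDimension(PointValues.getMinPackedValue(reader, field), 0);
        long max = LongPoint.decodeDimension(PointValues.getMaxPackedValue(reader, field), 0);
        return new long[] { min, max };
    }
}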
@Override
public Relation isFieldWithinQuery(IndexReader reader,
Object from, Object to, boolean includeLower, boolean includeUpper,

View File

@ -21,15 +21,10 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
@ -190,26 +185,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: ["
+ name() + "]");
}
@Override
public FieldStats.GeoPoint stats(IndexReader reader) throws IOException {
String field = name();
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
if (fi == null) {
return null;
}
final long size = PointValues.size(reader, field);
if (size == 0) {
return new FieldStats.GeoPoint(reader.maxDoc(), -1L, -1L, -1L, isSearchable(), isAggregatable());
}
final int docCount = PointValues.getDocCount(reader, field);
byte[] min = PointValues.getMinPackedValue(reader, field);
byte[] max = PointValues.getMaxPackedValue(reader, field);
GeoPoint minPt = new GeoPoint(GeoEncodingUtils.decodeLatitude(min, 0), GeoEncodingUtils.decodeLongitude(min, Integer.BYTES));
GeoPoint maxPt = new GeoPoint(GeoEncodingUtils.decodeLatitude(max, 0), GeoEncodingUtils.decodeLongitude(max, Integer.BYTES));
return new FieldStats.GeoPoint(reader.maxDoc(), docCount, -1L, size, isSearchable(), isAggregatable(),
minPt, maxPt);
}
}
protected void parse(ParseContext originalContext, GeoPoint point) throws IOException {

View File

@ -19,9 +19,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
@ -31,7 +29,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.SpatialStrategy;
@ -413,20 +410,6 @@ public class GeoShapeFieldMapper extends FieldMapper {
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead");
}
@Override
public FieldStats stats(IndexReader reader) throws IOException {
int maxDoc = reader.maxDoc();
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
if (fi == null) {
return null;
}
/**
* we don't have a specific type for geo_shape so we use an empty {@link FieldStats.Text}.
* TODO: we should maybe support a new type that knows how to (de)encode the min/max information
*/
return new FieldStats.Text(maxDoc, -1, -1, -1, isSearchable(), isAggregatable());
}
}
protected Explicit<Boolean> coerce;

View File

@ -22,17 +22,13 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.network.InetAddresses;
@ -213,25 +209,6 @@ public class IpFieldMapper extends FieldMapper {
return InetAddressPoint.newRangeQuery(name(), lower, upper);
}
@Override
public FieldStats.Ip stats(IndexReader reader) throws IOException {
String field = name();
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
if (fi == null) {
return null;
}
long size = PointValues.size(reader, field);
if (size == 0) {
return new FieldStats.Ip(reader.maxDoc(), 0, -1, -1, isSearchable(), isAggregatable());
}
int docCount = PointValues.getDocCount(reader, field);
byte[] min = PointValues.getMinPackedValue(reader, field);
byte[] max = PointValues.getMaxPackedValue(reader, field);
return new FieldStats.Ip(reader.maxDoc(), docCount, -1L, size,
isSearchable(), isAggregatable(),
InetAddressPoint.decode(min), InetAddressPoint.decode(max));
}
public static final class IpScriptDocValues extends ScriptDocValues<String> {
private final SortedSetDocValues in;

View File

@ -20,14 +20,11 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
@ -37,7 +34,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.lucene.all.AllTermQuery;
@ -373,26 +369,6 @@ public abstract class MappedFieldType extends FieldType {
return new ConstantScoreQuery(termQuery(nullValue, null));
}
/**
* @return a {@link FieldStats} instance that maps to the type of this
* field or {@code null} if the provided index has no stats about the
* current field
*/
public FieldStats stats(IndexReader reader) throws IOException {
int maxDoc = reader.maxDoc();
FieldInfo fi = MultiFields.getMergedFieldInfos(reader).fieldInfo(name());
if (fi == null) {
return null;
}
Terms terms = MultiFields.getTerms(reader, name());
if (terms == null) {
return new FieldStats.Text(maxDoc, 0, -1, -1, isSearchable(), isAggregatable());
}
FieldStats stats = new FieldStats.Text(maxDoc, terms.getDocCount(),
terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), terms.getMin(), terms.getMax());
return stats;
}
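Callers that still need the term-level numbers can reach for the same Lucene calls the removed default implementation used; a minimal sketch (TermStatsSketch is an illustrative name):

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;

class TermStatsSketch {
    // Prints the doc count, summed doc frequency, and min/max terms for a
    // field, or notes that nothing was indexed.
    static void print(IndexReader reader, String field) throws IOException {
        Terms terms = MultiFields.getTerms(reader, field);
        if (terms == null) {
            System.out.println(field + ": no terms indexed");
            return;
        }
        System.out.println("docs=" + terms.getDocCount()
                + " sumDocFreq=" + terms.getSumDocFreq()
                + " min=" + terms.getMin().utf8ToString()
                + " max=" + terms.getMax().utf8ToString());
    }
}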
/**
* An enum used to describe the relation between the range of terms in a
* shard when compared with a query range

View File

@ -27,18 +27,14 @@ import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Setting;
@ -235,25 +231,6 @@ public class NumberFieldMapper extends FieldMapper {
}
return fields;
}
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
if (fi == null) {
return null;
}
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
HalfFloatPoint.decodeDimension(min, 0), HalfFloatPoint.decodeDimension(max, 0));
}
},
FLOAT("float", NumericType.FLOAT) {
@Override
@ -331,25 +308,6 @@ public class NumberFieldMapper extends FieldMapper {
}
return fields;
}
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
if (fi == null) {
return null;
}
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
FloatPoint.decodeDimension(min, 0), FloatPoint.decodeDimension(max, 0));
}
},
DOUBLE("double", NumericType.DOUBLE) {
@Override
@ -427,25 +385,6 @@ public class NumberFieldMapper extends FieldMapper {
}
return fields;
}
@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
if (fi == null) {
return null;
}
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Double(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
DoublePoint.decodeDimension(min, 0), DoublePoint.decodeDimension(max, 0));
}
},
BYTE("byte", NumericType.BYTE) {
@Override
@ -498,12 +437,6 @@ public class NumberFieldMapper extends FieldMapper {
return INTEGER.createFields(name, value, indexed, docValued, stored);
}
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
}
@Override
Number valueForSearch(Number value) {
return value.byteValue();
@ -560,12 +493,6 @@ public class NumberFieldMapper extends FieldMapper {
return INTEGER.createFields(name, value, indexed, docValued, stored);
}
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
return (FieldStats.Long) INTEGER.stats(reader, fieldName, isSearchable, isAggregatable);
}
@Override
Number valueForSearch(Number value) {
return value.shortValue();
@ -681,25 +608,6 @@ public class NumberFieldMapper extends FieldMapper {
}
return fields;
}
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
if (fi == null) {
return null;
}
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
IntPoint.decodeDimension(min, 0), IntPoint.decodeDimension(max, 0));
}
},
LONG("long", NumericType.LONG) {
@Override
@ -811,25 +719,6 @@ public class NumberFieldMapper extends FieldMapper {
}
return fields;
}
@Override
FieldStats.Long stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
FieldInfo fi = org.apache.lucene.index.MultiFields.getMergedFieldInfos(reader).fieldInfo(fieldName);
if (fi == null) {
return null;
}
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return new FieldStats.Long(reader.maxDoc(), 0, -1, -1, isSearchable, isAggregatable);
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}
};
private final String name;
@ -857,8 +746,6 @@ public class NumberFieldMapper extends FieldMapper {
abstract Number parse(Object value, boolean coerce);
public abstract List<Field> createFields(String name, Number value, boolean indexed,
boolean docValued, boolean stored);
abstract FieldStats<? extends Number> stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException;
Number valueForSearch(Number value) {
return value;
}
@ -953,11 +840,6 @@ public class NumberFieldMapper extends FieldMapper {
return query;
}
@Override
public FieldStats stats(IndexReader reader) throws IOException {
return type.stats(reader, name(), isSearchable(), isAggregatable());
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();

View File

@ -20,16 +20,14 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
@ -260,25 +258,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
return query;
}
@Override
public FieldStats<?> stats(IndexReader reader) throws IOException {
FieldStats.Long stats = (FieldStats.Long) NumberFieldMapper.NumberType.LONG.stats(
reader, name(), isSearchable(), isAggregatable());
if (stats == null) {
return null;
}
if (stats.hasMinMax()) {
return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(),
stats.getSumDocFreq(), stats.getSumTotalTermFreq(),
stats.isSearchable(), stats.isAggregatable(),
stats.getMinValue() / scalingFactor,
stats.getMaxValue() / scalingFactor);
}
return new FieldStats.Double(stats.getMaxDoc(), stats.getDocCount(),
stats.getSumDocFreq(), stats.getSumTotalTermFreq(),
stats.isSearchable(), stats.isAggregatable());
}
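The scaling in the removed method is easier to see in isolation; a sketch of the encode/decode pair scaled_float relies on (the class and method names are illustrative):

class ScaledFloatSketch {
    // scaled_float stores Math.round(value * scalingFactor) as a long;
    // the removed stats() mapped stored min/max back with the inverse.
    static long encode(double value, double scalingFactor) {
        return Math.round(value * scalingFactor);
    }

    static double decode(long stored, double scalingFactor) {
        return stored / scalingFactor;
    }
}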
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();

View File

@ -23,13 +23,10 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -212,20 +209,6 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
}
@Override
public FieldStats stats(IndexReader reader) throws IOException {
String fieldName = name();
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return null;
}
int docCount = PointValues.getDocCount(reader, fieldName);
byte[] min = PointValues.getMinPackedValue(reader, fieldName);
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Long(reader.maxDoc(), docCount, -1L, size, true, false,
LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0));
}
}
public SeqNoFieldMapper(Settings indexSettings) {

View File

@ -21,9 +21,9 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.BooleanClause;
@ -32,18 +32,16 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.ConstantIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
@ -129,15 +127,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
}
}
@Override
public FieldStats<?> stats(IndexReader reader) throws IOException {
if (reader.maxDoc() == 0) {
return null;
}
return new FieldStats.Text(reader.maxDoc(), reader.numDocs(), reader.maxDoc(), reader.maxDoc(),
isSearchable(), isAggregatable());
}
@Override
public boolean isSearchable() {
return true;

View File

@ -34,7 +34,6 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
@ -46,7 +45,6 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.io.FileSystemUtils;
@ -81,11 +79,9 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.query.QueryBuilder;
@ -1154,34 +1150,6 @@ public class IndicesService extends AbstractLifecycleComponent
}
}
/**
* Fetch {@linkplain FieldStats} for a field. These stats are cached until the shard changes.
* @param shard the shard to use with the cache key
* @param searcher searcher to use to lookup the field stats
* @param field the actual field
* @param useCache should this request use the cache?
*/
public FieldStats<?> getFieldStats(IndexShard shard, Engine.Searcher searcher, String field, boolean useCache) throws Exception {
MappedFieldType fieldType = shard.mapperService().fullName(field);
if (fieldType == null) {
return null;
}
if (useCache == false) {
return fieldType.stats(searcher.reader());
}
BytesReference cacheKey = new BytesArray("fieldstats:" + field);
BytesReference statsRef = cacheShardLevelResult(shard, searcher.getDirectoryReader(), cacheKey, out -> {
try {
out.writeOptionalWriteable(fieldType.stats(searcher.reader()));
} catch (IOException e) {
throw new IllegalStateException("Failed to write field stats output", e);
}
});
try (StreamInput in = statsRef.streamInput()) {
return in.readOptionalWriteable(FieldStats::readFrom);
}
}
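The cache-key pattern above outlived this API; a simplified sketch of the idea, with a plain map standing in for the shard-level request cache (all names here are hypothetical):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

class ShardResultCache {
    // Results are keyed per shard and generation, so a changed reader
    // naturally produces a fresh key and the stale entry is never reused.
    private final Map<String, byte[]> cache = new ConcurrentHashMap<>();

    byte[] computeIfAbsent(String shardAndGeneration, String field, Function<String, byte[]> loader) {
        return cache.computeIfAbsent(shardAndGeneration + ":fieldstats:" + field, k -> loader.apply(field));
    }
}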
public ByteSizeValue getTotalIndexingBufferBytes() {
return indexingMemoryController.indexingBufferSize();
}

View File

@ -1,119 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.FieldStatsRequest;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.action.RestActions.buildBroadcastShardsHeader;
public class RestFieldStatsAction extends BaseRestHandler {
public RestFieldStatsAction(Settings settings, RestController controller) {
super(settings);
controller.registerAsDeprecatedHandler(GET, "/_field_stats", this,
deprecationMessage(), deprecationLogger);
controller.registerAsDeprecatedHandler(POST, "/_field_stats", this,
deprecationMessage(), deprecationLogger);
controller.registerAsDeprecatedHandler(GET, "/{index}/_field_stats", this,
deprecationMessage(), deprecationLogger);
controller.registerAsDeprecatedHandler(POST, "/{index}/_field_stats", this,
deprecationMessage(), deprecationLogger);
}
static String deprecationMessage() {
return "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or " +
"run a min/max aggregations on the desired fields.";
}
@Override
public String getName() {
return "field_stats_action";
}
@Override
public RestChannelConsumer prepareRequest(final RestRequest request,
final NodeClient client) throws IOException {
if (request.hasContentOrSourceParam() && request.hasParam("fields")) {
throw new IllegalArgumentException("can't specify a request body and [fields] request parameter, " +
"either specify a request body or the [fields] request parameter");
}
final FieldStatsRequest fieldStatsRequest = new FieldStatsRequest();
fieldStatsRequest.indices(Strings.splitStringByCommaToArray(request.param("index")));
fieldStatsRequest.indicesOptions(IndicesOptions.fromRequest(request, fieldStatsRequest.indicesOptions()));
fieldStatsRequest.level(request.param("level", FieldStatsRequest.DEFAULT_LEVEL));
if (request.hasContentOrSourceParam()) {
try (XContentParser parser = request.contentOrSourceParamParser()) {
fieldStatsRequest.source(parser);
}
} else {
fieldStatsRequest.setFields(Strings.splitStringByCommaToArray(request.param("fields")));
}
return channel -> client.fieldStats(fieldStatsRequest, new RestBuilderListener<FieldStatsResponse>(channel) {
@Override
public RestResponse buildResponse(FieldStatsResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
buildBroadcastShardsHeader(builder, request, response);
builder.startObject("indices");
for (Map.Entry<String, Map<String, FieldStats>> entry1 :
response.getIndicesMergedFieldStats().entrySet()) {
builder.startObject(entry1.getKey());
builder.startObject("fields");
for (Map.Entry<String, FieldStats> entry2 : entry1.getValue().entrySet()) {
builder.field(entry2.getKey());
entry2.getValue().toXContent(builder, request);
}
builder.endObject();
builder.endObject();
}
builder.endObject();
if (response.getConflicts().size() > 0) {
builder.startObject("conflicts");
for (Map.Entry<String, String> entry : response.getConflicts().entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
}
builder.endObject();
return new BytesRestResponse(RestStatus.OK, builder);
}
});
}
}

View File

@ -34,7 +34,6 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexResponse;
@ -168,20 +167,22 @@ public class TasksIT extends ESIntegTestCase {
}
public void testTransportReplicationAllShardsTasks() {
registerTaskManageListeners(FieldStatsAction.NAME); // main task
registerTaskManageListeners(FieldStatsAction.NAME + "[s]"); // shard level tasks
registerTaskManageListeners(ValidateQueryAction.NAME); // main task
registerTaskManageListeners(ValidateQueryAction.NAME + "[s]"); // shard
// level
// tasks
createIndex("test");
ensureGreen("test"); // Make sure all shards are allocated
client().prepareFieldStats().setFields("field").get();
client().admin().indices().prepareValidateQuery("test").setAllShards(true).get();
// the field stats operation should produce one main task
NumShards numberOfShards = getNumShards("test");
assertEquals(1, numberOfEvents(FieldStatsAction.NAME, Tuple::v1));
assertEquals(1, numberOfEvents(ValidateQueryAction.NAME, Tuple::v1));
// and then one operation per shard
assertEquals(numberOfShards.numPrimaries, numberOfEvents(FieldStatsAction.NAME + "[s]", Tuple::v1));
assertEquals(numberOfShards.numPrimaries, numberOfEvents(ValidateQueryAction.NAME + "[s]", Tuple::v1));
// the shard level tasks should have the main task as a parent
assertParentTask(findEvents(FieldStatsAction.NAME + "[s]", Tuple::v1), findEvents(FieldStatsAction.NAME, Tuple::v1).get(0));
assertParentTask(findEvents(ValidateQueryAction.NAME + "[s]", Tuple::v1), findEvents(ValidateQueryAction.NAME, Tuple::v1).get(0));
}
public void testTransportBroadcastByNodeTasks() {

View File

@ -1,126 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldstats;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.fieldstats.FieldStatsTests;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
import org.elasticsearch.test.VersionUtils;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
import static org.hamcrest.Matchers.equalTo;
public class FieldStatsRequestTests extends ESTestCase {
public void testFieldsParsing() throws Exception {
BytesArray data = new BytesArray(
StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/" +
"fieldstats-index-constraints-request.json"));
FieldStatsRequest request = new FieldStatsRequest();
request.source(createParser(JsonXContent.jsonXContent, data));
assertThat(request.getFields().length, equalTo(5));
assertThat(request.getFields()[0], equalTo("field1"));
assertThat(request.getFields()[1], equalTo("field2"));
assertThat(request.getFields()[2], equalTo("field3"));
assertThat(request.getFields()[3], equalTo("field4"));
assertThat(request.getFields()[4], equalTo("field5"));
assertThat(request.getIndexConstraints().length, equalTo(8));
assertThat(request.getIndexConstraints()[0].getField(), equalTo("field2"));
assertThat(request.getIndexConstraints()[0].getValue(), equalTo("9"));
assertThat(request.getIndexConstraints()[0].getProperty(), equalTo(MAX));
assertThat(request.getIndexConstraints()[0].getComparison(), equalTo(GTE));
assertThat(request.getIndexConstraints()[1].getField(), equalTo("field3"));
assertThat(request.getIndexConstraints()[1].getValue(), equalTo("5"));
assertThat(request.getIndexConstraints()[1].getProperty(), equalTo(MIN));
assertThat(request.getIndexConstraints()[1].getComparison(), equalTo(GT));
assertThat(request.getIndexConstraints()[2].getField(), equalTo("field4"));
assertThat(request.getIndexConstraints()[2].getValue(), equalTo("a"));
assertThat(request.getIndexConstraints()[2].getProperty(), equalTo(MIN));
assertThat(request.getIndexConstraints()[2].getComparison(), equalTo(GTE));
assertThat(request.getIndexConstraints()[3].getField(), equalTo("field4"));
assertThat(request.getIndexConstraints()[3].getValue(), equalTo("g"));
assertThat(request.getIndexConstraints()[3].getProperty(), equalTo(MAX));
assertThat(request.getIndexConstraints()[3].getComparison(), equalTo(LTE));
assertThat(request.getIndexConstraints()[4].getField(), equalTo("field5"));
assertThat(request.getIndexConstraints()[4].getValue(), equalTo("2"));
assertThat(request.getIndexConstraints()[4].getProperty(), equalTo(MIN));
assertThat(request.getIndexConstraints()[4].getComparison(), equalTo(GT));
assertThat(request.getIndexConstraints()[5].getField(), equalTo("field5"));
assertThat(request.getIndexConstraints()[5].getValue(), equalTo("9"));
assertThat(request.getIndexConstraints()[5].getProperty(), equalTo(MAX));
assertThat(request.getIndexConstraints()[5].getComparison(), equalTo(LT));
assertThat(request.getIndexConstraints()[6].getField(), equalTo("field1"));
assertThat(request.getIndexConstraints()[6].getValue(), equalTo("2014-01-01"));
assertThat(request.getIndexConstraints()[6].getProperty(), equalTo(MIN));
assertThat(request.getIndexConstraints()[6].getComparison(), equalTo(GTE));
assertThat(request.getIndexConstraints()[6].getOptionalFormat(), equalTo("date_optional_time"));
assertThat(request.getIndexConstraints()[7].getField(), equalTo("field1"));
assertThat(request.getIndexConstraints()[7].getValue(), equalTo("2015-01-01"));
assertThat(request.getIndexConstraints()[7].getProperty(), equalTo(MAX));
assertThat(request.getIndexConstraints()[7].getComparison(), equalTo(LT));
assertThat(request.getIndexConstraints()[7].getOptionalFormat(), equalTo("date_optional_time"));
}
public void testFieldStatsBWC() throws Exception {
int size = randomIntBetween(5, 20);
Map<String, FieldStats<?>> stats = new HashMap<>();
for (int i = 0; i < size; i++) {
stats.put(Integer.toString(i), FieldStatsTests.randomFieldStats(true));
}
FieldStatsShardResponse response = new FieldStatsShardResponse(new ShardId("test", "test", 0), stats);
for (int i = 0; i < 10; i++) {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT);
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(version);
response.writeTo(output);
output.flush();
StreamInput input = output.bytes().streamInput();
input.setVersion(version);
FieldStatsShardResponse deserialized = new FieldStatsShardResponse();
deserialized.readFrom(input);
final Map<String, FieldStats<?>> expected;
if (version.before(Version.V_5_2_0)) {
expected = deserialized.filterNullMinMax();
} else {
expected = deserialized.getFieldStats();
}
assertEquals(expected.size(), deserialized.getFieldStats().size());
assertThat(expected, equalTo(deserialized.getFieldStats()));
}
}
}
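The BWC loop above exercises the standard version-gated wire pattern; a minimal sketch of that pattern (the gated long field is illustrative):

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput;

class VersionGatedWrite {
    // Gate new wire fields on the receiver's version so that older nodes
    // never see bytes they cannot parse.
    static void write(StreamOutput out, long newField) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_5_2_0)) {
            out.writeLong(newField);
        }
    }
}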

View File

@ -1,571 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.fieldstats;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.fieldstats.IndexConstraint;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.geo.RandomGeoGenerator;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.nullValue;
/**
* Tests for the {@link FieldStatsAction}.
*/
public class FieldStatsIntegrationIT extends ESIntegTestCase {
public void testRandom() throws Exception {
assertAcked(prepareCreate("test").addMapping(
"test",
"string", "type=text",
"date", "type=date",
"double", "type=double",
"half_float", "type=half_float",
"float", "type=float",
"long", "type=long",
"integer", "type=integer",
"short", "type=short",
"byte", "type=byte",
"location", "type=geo_point"));
ensureGreen("test");
// index=false
assertAcked(prepareCreate("test1").addMapping(
"test",
"string", "type=text,index=false",
"date", "type=date,index=false",
"double", "type=double,index=false",
"half_float", "type=half_float",
"float", "type=float,index=false",
"long", "type=long,index=false",
"integer", "type=integer,index=false",
"short", "type=short,index=false",
"byte", "type=byte,index=false",
"location", "type=geo_point,index=false"
));
ensureGreen("test1");
// no value indexed
assertAcked(prepareCreate("test3").addMapping(
"test",
"string", "type=text,index=false",
"date", "type=date,index=false",
"double", "type=double,index=false",
"half_float", "type=half_float",
"float", "type=float,index=false",
"long", "type=long,index=false",
"integer", "type=integer,index=false",
"short", "type=short,index=false",
"byte", "type=byte,index=false",
"location", "type=geo_point,index=false"
));
ensureGreen("test3");
long minByte = Byte.MAX_VALUE;
long maxByte = Byte.MIN_VALUE;
long minShort = Short.MAX_VALUE;
long maxShort = Short.MIN_VALUE;
long minInt = Integer.MAX_VALUE;
long maxInt = Integer.MIN_VALUE;
long minLong = Long.MAX_VALUE;
long maxLong = Long.MIN_VALUE;
double minHalfFloat = Double.POSITIVE_INFINITY;
double maxHalfFloat = Double.NEGATIVE_INFINITY;
double minFloat = Double.POSITIVE_INFINITY;
double maxFloat = Double.NEGATIVE_INFINITY;
double minDouble = Double.POSITIVE_INFINITY;
double maxDouble = Double.NEGATIVE_INFINITY;
GeoPoint minLoc = new GeoPoint(90, 180);
GeoPoint maxLoc = new GeoPoint(-90, -180);
String minString = new String(Character.toChars(1114111));
String maxString = "0";
int numDocs = scaledRandomIntBetween(128, 1024);
List<IndexRequestBuilder> request = new ArrayList<>(numDocs);
for (int doc = 0; doc < numDocs; doc++) {
byte b = randomByte();
minByte = Math.min(minByte, b);
maxByte = Math.max(maxByte, b);
short s = randomShort();
minShort = Math.min(minShort, s);
maxShort = Math.max(maxShort, s);
int i = randomInt();
minInt = Math.min(minInt, i);
maxInt = Math.max(maxInt, i);
long l = randomLong();
minLong = Math.min(minLong, l);
maxLong = Math.max(maxLong, l);
float hf = randomFloat();
hf = HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(hf));
minHalfFloat = Math.min(minHalfFloat, hf);
maxHalfFloat = Math.max(maxHalfFloat, hf);
float f = randomFloat();
minFloat = Math.min(minFloat, f);
maxFloat = Math.max(maxFloat, f);
double d = randomDouble();
minDouble = Math.min(minDouble, d);
maxDouble = Math.max(maxDouble, d);
GeoPoint loc = RandomGeoGenerator.randomPoint(random());
minLoc.reset(Math.min(loc.lat(), minLoc.lat()), Math.min(loc.lon(), minLoc.lon()));
maxLoc.reset(Math.max(loc.lat(), maxLoc.lat()), Math.max(loc.lon(), maxLoc.lon()));
String str = randomRealisticUnicodeOfLength(3);
if (str.compareTo(minString) < 0) {
minString = str;
}
if (str.compareTo(maxString) > 0) {
maxString = str;
}
request.add(client().prepareIndex("test", "test", Integer.toString(doc))
.setSource("byte", b,
"short", s,
"integer", i,
"long", l,
"half_float", hf,
"float", f,
"double", d,
"location", loc,
"string", str)
);
}
indexRandom(true, false, request);
FieldStatsResponse response = client()
.prepareFieldStats()
.setFields("byte", "short", "integer", "long", "half_float", "float", "double", "location", "string").get();
assertAllSuccessful(response);
for (FieldStats<?> stats : response.getAllFieldStats().values()) {
assertThat(stats.getMaxDoc(), equalTo((long) numDocs));
assertThat(stats.getDocCount(), equalTo((long) numDocs));
assertThat(stats.getDensity(), equalTo(100));
}
assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(minByte));
assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(maxByte));
assertThat(response.getAllFieldStats().get("byte").getDisplayType(), equalTo("integer"));
assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(minShort));
assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(maxShort));
assertThat(response.getAllFieldStats().get("short").getDisplayType(), equalTo("integer"));
assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(minInt));
assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(maxInt));
assertThat(response.getAllFieldStats().get("integer").getDisplayType(), equalTo("integer"));
assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(minLong));
assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(maxLong));
assertThat(response.getAllFieldStats().get("long").getDisplayType(), equalTo("integer"));
assertThat(response.getAllFieldStats().get("half_float").getMinValue(), equalTo(minHalfFloat));
assertThat(response.getAllFieldStats().get("half_float").getMaxValue(), equalTo(maxHalfFloat));
assertThat(response.getAllFieldStats().get("half_float").getDisplayType(), equalTo("float"));
assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(minFloat));
assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(maxFloat));
assertThat(response.getAllFieldStats().get("float").getDisplayType(), equalTo("float"));
assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(minDouble));
assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(maxDouble));
assertThat(response.getAllFieldStats().get("double").getDisplayType(), equalTo("float"));
assertThat(((GeoPoint)response.getAllFieldStats().get("location").getMinValue()).lat(), closeTo(minLoc.lat(), 1E-5));
assertThat(((GeoPoint)response.getAllFieldStats().get("location").getMinValue()).lon(), closeTo(minLoc.lon(), 1E-5));
assertThat(((GeoPoint)response.getAllFieldStats().get("location").getMaxValue()).lat(), closeTo(maxLoc.lat(), 1E-5));
assertThat(((GeoPoint)response.getAllFieldStats().get("location").getMaxValue()).lon(), closeTo(maxLoc.lon(), 1E-5));
assertThat(response.getAllFieldStats().get("location").getDisplayType(), equalTo("geo_point"));
}
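The half_float expectations in testRandom are rounded through the sortable-short encoding first; a small sketch of why (the helper name is illustrative):

import org.apache.lucene.document.HalfFloatPoint;

class HalfFloatRounding {
    // Half floats quantize to the nearest representable 16-bit value,
    // so expected values must be quantized before comparison.
    static float quantize(float f) {
        return HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(f));
    }
}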
public void testFieldStatsIndexLevel() throws Exception {
assertAcked(prepareCreate("test1").addMapping(
"test", "value", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "value", "type=long"
));
assertAcked(prepareCreate("test3").addMapping(
"test", "value", "type=long"
));
ensureGreen("test1", "test2", "test3");
indexRange("test1", -10, 100);
indexRange("test2", 101, 200);
indexRange("test3", 201, 300);
// default:
FieldStatsResponse response = client().prepareFieldStats().setFields("value").get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getDisplayType(),
equalTo("integer"));
// Level: cluster
response = client().prepareFieldStats().setFields("value").setLevel("cluster").get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10L));
assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getDisplayType(),
equalTo("integer"));
// Level: indices
response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(3));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getDisplayType(),
equalTo("integer"));
// Illegal level option:
try {
client().prepareFieldStats().setFields("value").setLevel("illegal").get();
fail();
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), equalTo("Validation Failed: 1: invalid level option [illegal];"));
}
}
public void testIncompatibleFieldTypesSingleField() {
assertAcked(prepareCreate("test1").addMapping(
"test", "value", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "value", "type=text"
));
ensureGreen("test1", "test2");
client().prepareIndex("test1", "test").setSource("value", 1L).get();
client().prepareIndex("test1", "test").setSource("value", 2L).get();
client().prepareIndex("test2", "test").setSource("value", "a").get();
client().prepareIndex("test2", "test").setSource("value", "b").get();
refresh();
FieldStatsResponse response = client().prepareFieldStats().setFields("value", "value2").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("_all").size(), equalTo(0));
assertThat(response.getConflicts().size(), equalTo(1));
assertThat(response.getConflicts().get("value"),
equalTo("Field [value] of type [integer] conflicts with existing field of type [string] " +
"in other index."));
response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
equalTo(new BytesRef("a")));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(),
equalTo(new BytesRef("b")));
}
public void testIncompatibleFieldTypesMultipleFields() {
assertAcked(prepareCreate("test1").addMapping(
"test", "value", "type=long", "value2", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "value", "type=text", "value2", "type=long"
));
ensureGreen("test1", "test2");
client().prepareIndex("test1", "test").setSource("value", 1L, "value2", 1L).get();
client().prepareIndex("test1", "test").setSource("value", 2L).get();
client().prepareIndex("test2", "test").setSource("value", "a").get();
client().prepareIndex("test2", "test").setSource("value", "b").get();
refresh();
FieldStatsResponse response = client().prepareFieldStats().setFields("value", "value2").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("_all").size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value2").getMinValue(), equalTo(1L));
assertThat(response.getIndicesMergedFieldStats().get("_all").get("value2").getMaxValue(), equalTo(1L));
assertThat(response.getConflicts().size(), equalTo(1));
assertThat(response.getConflicts().get("value"),
equalTo("Field [value] of type [integer] conflicts with existing field of type [string] " +
"in other index."));
response = client().prepareFieldStats().setFields("value", "value2").setLevel("indices").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value2").getMinValue(), equalTo(1L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value2").getMaxValue(), equalTo(1L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
equalTo(new BytesRef("a")));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(),
equalTo(new BytesRef("b")));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(),
equalTo("string"));
}
public void testFieldStatsFiltering() throws Exception {
assertAcked(prepareCreate("test1").addMapping(
"test", "value", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "value", "type=long"
));
assertAcked(prepareCreate("test3").addMapping(
"test", "value", "type=long"
));
ensureGreen("test1", "test2", "test3");
indexRange("test1", -10, 100);
indexRange("test2", 101, 200);
indexRange("test3", 201, 300);
FieldStatsResponse response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "200"),
new IndexConstraint("value", MAX , LTE, "300"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MAX, LTE, "200"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "-20"),
new IndexConstraint("value", MAX, LT, "-10"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "-100"),
new IndexConstraint("value", MAX, LTE, "-20"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "100"),
new IndexConstraint("value", MAX, LTE, "200"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L));
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "150"),
new IndexConstraint("value", MAX, LTE, "300"))
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats(), nullValue());
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L));
assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L));
}
public void testIncompatibleFilter() throws Exception {
assertAcked(prepareCreate("test1").addMapping(
"test", "value", "type=long"
));
indexRange("test1", -10, 100);
try {
client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MAX, LTE, "abc"))
.setLevel("indices")
.get();
fail("exception should have been thrown, because value abc is incompatible");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("For input string: \"abc\""));
}
}
public void testWildcardFields() throws Exception {
assertAcked(prepareCreate("test1").addMapping(
"test", "foo", "type=long", "foobar", "type=long", "barfoo", "type=long"
));
assertAcked(prepareCreate("test2").addMapping(
"test", "foobar", "type=long", "barfoo", "type=long"
));
ensureGreen("test1", "test2");
FieldStatsResponse response = client().prepareFieldStats()
.setFields("foo*")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats().size(), equalTo(0));
indexRange("test1", "foo", -100, 0);
indexRange("test2", "foo", -10, 100);
indexRange("test1", "foobar", -10, 100);
indexRange("test2", "foobar", -100, 0);
response = client().prepareFieldStats()
.setFields("foo*")
.get();
assertAllSuccessful(response);
assertThat(response.getAllFieldStats().size(), equalTo(2));
assertThat(response.getAllFieldStats().get("foo").getMinValue(), equalTo(-100L));
assertThat(response.getAllFieldStats().get("foo").getMaxValue(), equalTo(100L));
assertThat(response.getAllFieldStats().get("foobar").getMinValue(), equalTo(-100L));
assertThat(response.getAllFieldStats().get("foobar").getMaxValue(), equalTo(100L));
response = client().prepareFieldStats()
.setFields("foo*")
.setLevel("indices")
.get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test1").size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("foo").getMinValue(), equalTo(-100L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("foo").getMaxValue(), equalTo(0L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("foobar").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("test1").get("foobar").getMaxValue(), equalTo(100L));
assertThat(response.getIndicesMergedFieldStats().get("test2").size(), equalTo(2));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("foobar").getMinValue(), equalTo(-100L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("foobar").getMaxValue(), equalTo(0L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("foo").getMinValue(), equalTo(-10L));
assertThat(response.getIndicesMergedFieldStats().get("test2").get("foo").getMaxValue(), equalTo(100L));
}
public void testCached() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test").setSettings("index.number_of_replicas", 0));
indexRange("test", "value", 0, 99);
// First query should be a cache miss
FieldStatsResponse fieldStats = client().prepareFieldStats().setFields("value").get();
assertEquals(100, fieldStats.getAllFieldStats().get("value").getDocCount());
RequestCacheStats indexStats = client().admin().indices().prepareStats().get().getIndex("test").getTotal().getRequestCache();
assertEquals(0, indexStats.getHitCount());
assertThat(indexStats.getMemorySizeInBytes(), greaterThan(0L));
// Second query should be a cache hit
fieldStats = client().prepareFieldStats().setFields("value").get();
assertEquals(100, fieldStats.getAllFieldStats().get("value").getDocCount());
indexStats = client().admin().indices().prepareStats().get().getIndex("test").getTotal().getRequestCache();
assertThat(indexStats.getHitCount(), greaterThan(0L));
assertThat(indexStats.getMemorySizeInBytes(), greaterThan(0L));
// Indexing some new documents and refreshing should give you consistent data.
long oldHitCount = indexStats.getHitCount();
indexRange("test", "value", 100, 199);
fieldStats = client().prepareFieldStats().setFields("value").get();
assertEquals(200, fieldStats.getAllFieldStats().get("value").getDocCount());
// Because we refreshed the index there are no new cache hits; this response is read from the index.
assertEquals(oldHitCount, indexStats.getHitCount());
// We can also turn off the cache entirely
fieldStats = client().prepareFieldStats().setFields("value").setUseCache(false).get();
assertEquals(200, fieldStats.getAllFieldStats().get("value").getDocCount());
assertEquals(oldHitCount, indexStats.getHitCount());
}
public void testGeoPointNotIndexed() throws Exception {
assertAcked(prepareCreate("test").addMapping("test", "value", "type=long", "location", "type=geo_point,index=false"));
ensureGreen("test");
client().prepareIndex("test", "test").setSource("value", 1L, "location", new GeoPoint(32, -132)).get();
client().prepareIndex("test", "test").setSource("value", 2L).get();
client().prepareIndex("test", "test").setSource("value", 3L).get();
client().prepareIndex("test", "test").setSource("value", 4L).get();
refresh();
FieldStatsResponse response = client().prepareFieldStats().setFields("value", "location").get();
assertAllSuccessful(response);
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
assertThat(response.getAllFieldStats().get("location").getMinValue(), equalTo(null));
assertThat(response.getAllFieldStats().get("location").getMaxValue(), equalTo(null));
assertThat(response.getAllFieldStats().get("location").isAggregatable(), equalTo(true));
assertThat(response.getAllFieldStats().get("location").isSearchable(), equalTo(false));
}
private void indexRange(String index, long from, long to) throws Exception {
indexRange(index, "value", from, to);
}
private void indexRange(String index, String field, long from, long to) throws Exception {
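// Bulk-indexes one document per value in [from, to], then forces a refresh (indexRandom with refresh=true).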
List<IndexRequestBuilder> requests = new ArrayList<>();
for (long value = from; value <= to; value++) {
requests.add(client().prepareIndex(index, "test").setSource(field, value));
}
indexRandom(true, false, requests);
}
}
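The deprecation warning asserted in the REST tests below points users at min/max aggregations (or _field_caps) as the replacement for these removed stats. A minimal sketch of that replacement, assuming the 5.x AggregationBuilders API; the index and field names are illustrative:

// Requires org.elasticsearch.action.search.SearchResponse,
// org.elasticsearch.search.aggregations.AggregationBuilders,
// org.elasticsearch.search.aggregations.metrics.min.Min and
// org.elasticsearch.search.aggregations.metrics.max.Max
SearchResponse aggResponse = client().prepareSearch("test")
.setSize(0) // no hits needed, only the aggregation results
.addAggregation(AggregationBuilders.min("min_value").field("value"))
.addAggregation(AggregationBuilders.max("max_value").field("value"))
.get();
Min min = aggResponse.getAggregations().get("min_value");
Max max = aggResponse.getAggregations().get("max_value");
// min.getValue() and max.getValue() stand in for getMinValue()/getMaxValue() above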

View File

@ -1,688 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.fieldstats;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.FieldStatsResponse;
import org.elasticsearch.action.fieldstats.IndexConstraint;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.GTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LT;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.LTE;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
import static org.hamcrest.Matchers.containsString;
public class FieldStatsTests extends ESSingleNodeTestCase {
public void testByte() {
testNumberRange("field1", "byte", 12, 18);
testNumberRange("field1", "byte", -5, 5);
testNumberRange("field1", "byte", -18, -12);
}
public void testShort() {
testNumberRange("field1", "short", 256, 266);
testNumberRange("field1", "short", -5, 5);
testNumberRange("field1", "short", -266, -256);
}
public void testInteger() {
testNumberRange("field1", "integer", 56880, 56890);
testNumberRange("field1", "integer", -5, 5);
testNumberRange("field1", "integer", -56890, -56880);
}
public void testLong() {
testNumberRange("field1", "long", 312321312312412L, 312321312312422L);
testNumberRange("field1", "long", -5, 5);
testNumberRange("field1", "long", -312321312312422L, -312321312312412L);
}
private static String makeType(String type, boolean indexed, boolean docValues, boolean stored) {
return new StringBuilder()
.append("type=").append(type)
.append(",index=").append(indexed)
.append(",doc_values=").append(docValues)
.append(",store=").append(stored).toString();
}
public void testString() {
createIndex("test", Settings.EMPTY, "test",
"field_index", makeType("keyword", true, false, false),
"field_dv", makeType("keyword", false, true, false),
"field_stored", makeType("keyword", false, true, true),
"field_source", makeType("keyword", false, false, false));
for (int value = 0; value <= 10; value++) {
String keyword = String.format(Locale.ENGLISH, "%03d", value);
client().prepareIndex("test", "test")
.setSource("field_index", keyword,
"field_dv", keyword,
"field_stored", keyword,
"field_source", keyword).get();
}
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats()
.setFields("field_index", "field_dv", "field_stored", "field_source").get();
assertEquals(result.getAllFieldStats().size(), 3);
for (String field : new String[] {"field_index", "field_dv", "field_stored"}) {
FieldStats stats = result.getAllFieldStats().get(field);
assertEquals(stats.getMaxDoc(), 11L);
assertEquals(stats.getDisplayType(), "string");
if ("field_index".equals(field)) {
assertEquals(stats.getMinValue(),
new BytesRef(String.format(Locale.ENGLISH, "%03d", 0)));
assertEquals(stats.getMaxValue(),
new BytesRef(String.format(Locale.ENGLISH, "%03d", 10)));
assertEquals(stats.getMinValueAsString(),
String.format(Locale.ENGLISH, "%03d", 0));
assertEquals(stats.getMaxValueAsString(),
String.format(Locale.ENGLISH, "%03d", 10));
assertEquals(stats.getDocCount(), 11L);
assertEquals(stats.getDensity(), 100);
} else {
assertEquals(stats.getDocCount(), 0L);
assertNull(stats.getMinValue());
assertNull(stats.getMaxValue());
assertEquals(stats.getDensity(), 0);
}
}
}
public void testDouble() {
createIndex("test", Settings.EMPTY, "test",
"field_index", makeType("double", true, false, false),
"field_dv", makeType("double", false, true, false),
"field_stored", makeType("double", false, true, true),
"field_source", makeType("double", false, false, false));
for (double value = -1; value <= 9; value++) {
client().prepareIndex("test", "test")
.setSource("field_index", value, "field_dv", value, "field_stored", value, "field_source", value).get();
}
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats()
.setFields("field_index", "field_dv", "field_stored", "field_source").get();
for (String field : new String[] {"field_index", "field_dv", "field_stored"}) {
FieldStats stats = result.getAllFieldStats().get(field);
assertEquals(stats.getMaxDoc(), 11L);
assertEquals(stats.getDisplayType(), "float");
if ("field_index".equals(field)) {
assertEquals(stats.getDocCount(), 11L);
assertEquals(stats.getDensity(), 100);
assertEquals(stats.getMinValue(), -1d);
assertEquals(stats.getMaxValue(), 9d);
assertEquals(stats.getMinValueAsString(), Double.toString(-1));
} else {
assertEquals(stats.getDocCount(), 0L);
assertNull(stats.getMinValue());
assertNull(stats.getMaxValue());
assertEquals(stats.getDensity(), 0);
}
}
}
public void testHalfFloat() {
createIndex("test", Settings.EMPTY, "test",
"field_index", makeType("half_float", true, false, false),
"field_dv", makeType("half_float", false, true, false),
"field_stored", makeType("half_float", false, true, true),
"field_source", makeType("half_float", false, false, false));
for (float value = -1; value <= 9; value++) {
client().prepareIndex("test", "test")
.setSource("field_index", value, "field_dv", value, "field_stored", value, "field_source", value).get();
}
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats()
.setFields("field_index", "field_dv", "field_stored", "field_source").get();
for (String field : new String[] {"field_index", "field_dv", "field_stored"}) {
FieldStats stats = result.getAllFieldStats().get(field);
assertEquals(stats.getMaxDoc(), 11L);
assertEquals(stats.getDisplayType(), "float");
if (field.equals("field_index")) {
assertEquals(stats.getDocCount(), 11L);
assertEquals(stats.getDensity(), 100);
assertEquals(stats.getMinValue(), -1d);
assertEquals(stats.getMaxValue(), 9d);
assertEquals(stats.getMinValueAsString(), Float.toString(-1));
assertEquals(stats.getMaxValueAsString(), Float.toString(9));
} else {
assertEquals(stats.getDocCount(), 0L);
assertNull(stats.getMinValue());
assertNull(stats.getMaxValue());
assertEquals(stats.getDensity(), 0);
}
}
}
public void testFloat() {
createIndex("test", Settings.EMPTY, "test",
"field_index", makeType("float", true, false, false),
"field_dv", makeType("float", false, true, false),
"field_stored", makeType("float", false, true, true),
"field_source", makeType("float", false, false, false));
for (float value = -1; value <= 9; value++) {
client().prepareIndex("test", "test")
.setSource("field_index", value, "field_dv", value, "field_stored", value, "field_source", value).get();
}
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats()
.setFields("field_index", "field_dv", "field_stored", "field_source").get();
for (String field : new String[]{"field_index", "field_dv", "field_stored"}) {
FieldStats stats = result.getAllFieldStats().get(field);
assertEquals(stats.getMaxDoc(), 11L);
assertEquals(stats.getDisplayType(), "float");
if (field.equals("field_index")) {
assertEquals(stats.getDocCount(), 11L);
assertEquals(stats.getDensity(), 100);
assertEquals(stats.getMinValue(), -1d);
assertEquals(stats.getMaxValue(), 9d);
assertEquals(stats.getMinValueAsString(), Float.toString(-1));
assertEquals(stats.getMaxValueAsString(), Float.toString(9));
} else {
assertEquals(stats.getDocCount(), 0L);
assertNull(stats.getMinValue());
assertNull(stats.getMaxValue());
assertEquals(stats.getDensity(), 0);
}
}
}
private void testNumberRange(String fieldName, String fieldType, long min, long max) {
createIndex("test", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
// index=false
createIndex("test1", Settings.EMPTY, "test", fieldName, "type=" + fieldType + ",index=false");
// no value
createIndex("test2", Settings.EMPTY, "test", fieldName, "type=" + fieldType);
for (long value = min; value <= max; value++) {
client().prepareIndex("test", "test").setSource(fieldName, value).get();
}
client().admin().indices().prepareRefresh().get();
FieldStatsResponse result = client().prepareFieldStats().setFields(fieldName).get();
long numDocs = max - min + 1;
assertEquals(result.getAllFieldStats().get(fieldName).getMaxDoc(), numDocs);
assertEquals(result.getAllFieldStats().get(fieldName).getDocCount(), numDocs);
assertEquals(result.getAllFieldStats().get(fieldName).getDensity(), 100);
assertEquals(result.getAllFieldStats().get(fieldName).getMinValue(), min);
assertEquals(result.getAllFieldStats().get(fieldName).getMaxValue(), max);
assertEquals(result.getAllFieldStats().get(fieldName).getMinValueAsString(),
Long.toString(min));
assertEquals(result.getAllFieldStats().get(fieldName).getMaxValueAsString(),
Long.toString(max));
assertEquals(result.getAllFieldStats().get(fieldName).isSearchable(), true);
assertEquals(result.getAllFieldStats().get(fieldName).isAggregatable(), true);
if (fieldType.equals("float") || fieldType.equals("double") || fieldType.equals("half-float")) {
assertEquals(result.getAllFieldStats().get(fieldName).getDisplayType(), "float");
} else {
assertEquals(result.getAllFieldStats().get(fieldName).getDisplayType(), "integer");
}
client().admin().indices().prepareDelete("test").get();
client().admin().indices().prepareDelete("test1").get();
client().admin().indices().prepareDelete("test2").get();
}
public void testMerge() {
List<FieldStats> stats = new ArrayList<>();
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
stats.add(new FieldStats.Long(0, 0, 0, 0, false, false));
FieldStats stat = new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L);
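// accumulate() sums maxDoc, docCount, sumDocFreq and sumTotalTermFreq, and ORs the searchable/aggregatable flags.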
for (FieldStats otherStat : stats) {
stat.accumulate(otherStat);
}
assertEquals(stat.getMaxDoc(), 4L);
assertEquals(stat.getDocCount(), 4L);
assertEquals(stat.getSumDocFreq(), 4L);
assertEquals(stat.getSumTotalTermFreq(), 4L);
assertEquals(stat.isSearchable(), true);
assertEquals(stat.isAggregatable(), false);
assertEquals(stat.getDisplayType(), "integer");
}
public void testMerge_notAvailable() {
List<FieldStats> stats = new ArrayList<>();
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, true, 1L, 1L));
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, true, 1L, 1L));
stats.add(new FieldStats.Long(1, 1L, 1L, 1L, true, false, 1L, 1L));
FieldStats stat = new FieldStats.Long(1, -1L, -1L, -1L, false, true, 1L, 1L);
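// A count of -1 marks a statistic as unavailable; once any merged stat reports -1, the accumulated value stays -1.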
for (FieldStats otherStat : stats) {
stat.accumulate(otherStat);
}
assertEquals(stat.getMaxDoc(), 4L);
assertEquals(stat.getDocCount(), -1L);
assertEquals(stat.getSumDocFreq(), -1L);
assertEquals(stat.getSumTotalTermFreq(), -1L);
assertEquals(stat.isSearchable(), true);
assertEquals(stat.isAggregatable(), true);
assertEquals(stat.getDisplayType(), "integer");
stats.add(new FieldStats.Long(1, -1L, -1L, -1L, false, true));
stat = stats.remove(0);
for (FieldStats otherStat : stats) {
stat.accumulate(otherStat);
}
assertEquals(stat.getMaxDoc(), 4L);
assertEquals(stat.getDocCount(), -1L);
assertEquals(stat.getSumDocFreq(), -1L);
assertEquals(stat.getSumTotalTermFreq(), -1L);
assertEquals(stat.isSearchable(), true);
assertEquals(stat.isAggregatable(), true);
assertEquals(stat.getDisplayType(), "integer");
assertNull(stat.getMaxValue());
assertNull(stat.getMinValue());
}
public void testNumberFiltering() {
createIndex("test1", Settings.EMPTY, "type", "value", "type=long");
client().prepareIndex("test1", "type").setSource("value", 1L).get();
createIndex("test2", Settings.EMPTY, "type", "value", "type=long");
client().prepareIndex("test2", "type").setSource("value", 3L).get();
client().admin().indices().prepareRefresh().get();
FieldStatsResponse response = client().prepareFieldStats()
.setFields("value")
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 2);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), 1L);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), 3L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "-1"),
new IndexConstraint("value", MAX, LTE, "0"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"),
new IndexConstraint("value", MAX, LT, "1"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "0"),
new IndexConstraint("value", MAX, LTE, "1"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), 1L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"),
new IndexConstraint("value", MAX, LTE, "2"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), 1L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "1"),
new IndexConstraint("value", MAX, LTE, "2"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "2"),
new IndexConstraint("value", MAX, LTE, "3"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), 3L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "3"),
new IndexConstraint("value", MAX, LTE, "4"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), 3L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "3"),
new IndexConstraint("value", MAX, LTE, "4"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "1"),
new IndexConstraint("value", MAX, LTE, "3"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 2);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), 1L);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), 3L);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "1"),
new IndexConstraint("value", MAX, LT, "3"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
}
public void testDateFiltering() {
DateTime dateTime1 = new DateTime(2014, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC);
String dateTime1Str = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().print(dateTime1);
DateTime dateTime2 = new DateTime(2014, 1, 2, 0, 0, 0, 0, DateTimeZone.UTC);
String dateTime2Str = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().print(dateTime2);
createIndex("test1", Settings.EMPTY, "type", "value", "type=date", "value2", "type=date,index=false");
client().prepareIndex("test1", "type")
.setSource("value", dateTime1Str, "value2", dateTime1Str).get();
createIndex("test2", Settings.EMPTY, "type", "value", "type=date");
client().prepareIndex("test2", "type").setSource("value", dateTime2Str).get();
client().admin().indices().prepareRefresh().get();
FieldStatsResponse response = client().prepareFieldStats()
.setFields("value")
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 2);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
dateTime1.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
dateTime2.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
dateTime1Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
dateTime2Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(),
"date");
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-30T00:00:00.000Z"),
new IndexConstraint("value", MAX, LTE, "2013-12-31T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "2013-12-31T00:00:00.000Z"),
new IndexConstraint("value", MAX, LTE, "2014-01-01T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
dateTime1.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
dateTime1Str);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getDisplayType(),
"date");
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-01T00:00:00.000Z"),
new IndexConstraint("value", MAX, LTE, "2014-01-02T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
dateTime2.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
dateTime2Str);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT, "2014-01-02T00:00:00.000Z"),
new IndexConstraint("value", MAX, LTE, "2014-01-03T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "2014-01-01T23:00:00.000Z"),
new IndexConstraint("value", MAX, LTE, "2014-01-02T01:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
dateTime2.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
dateTime2Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(),
"date");
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "2014-01-01T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 2);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
dateTime1.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
dateTime2.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
dateTime1Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
dateTime2Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(),
"date");
response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MAX, LTE, "2014-01-02T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 2);
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(),
dateTime1.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(),
dateTime2.getMillis());
assertEquals(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(),
dateTime1Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
dateTime2Str);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), "date");
response = client().prepareFieldStats()
.setFields("value2")
.setIndexContraints(new IndexConstraint("value2", MAX, LTE, "2014-01-02T00:00:00.000Z"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 0);
}
public void testDateFiltering_optionalFormat() {
createIndex("test1", Settings.EMPTY, "type", "value", "type=date,format=strict_date_optional_time");
client().prepareIndex("test1", "type").setSource("value", "2014-01-01T00:00:00.000Z").get();
createIndex("test2", Settings.EMPTY, "type", "value", "type=date,format=strict_date_optional_time");
client().prepareIndex("test2", "type").setSource("value", "2014-01-02T00:00:00.000Z").get();
client().admin().indices().prepareRefresh().get();
DateTime dateTime1 = new DateTime(2014, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC);
DateTime dateTime2 = new DateTime(2014, 1, 2, 0, 0, 0, 0, DateTimeZone.UTC);
FieldStatsResponse response = client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT,
String.valueOf(dateTime1.getMillis()), "epoch_millis"),
new IndexConstraint("value", MAX, LTE, String.valueOf(dateTime2.getMillis()), "epoch_millis"))
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(),
"2014-01-02T00:00:00.000Z");
assertEquals(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(),
"date");
try {
client().prepareFieldStats()
.setFields("value")
.setIndexContraints(new IndexConstraint("value", MIN, GT,
String.valueOf(dateTime1.getMillis()), "xyz"))
.setLevel("indices")
.get();
fail("IllegalArgumentException should have been thrown");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Invalid format"));
}
}
public void testEmptyIndex() {
createIndex("test1", Settings.EMPTY, "type", "value", "type=date");
FieldStatsResponse response = client().prepareFieldStats()
.setFields("*")
.setLevel("indices")
.get();
assertEquals(response.getIndicesMergedFieldStats().size(), 1);
assertEquals(response.getIndicesMergedFieldStats().get("test1").size(), 0);
}
public void testMetaFieldsNotIndexed() {
createIndex("test", Settings.EMPTY);
client().prepareIndex("test", "type").setSource().get();
client().admin().indices().prepareRefresh().get();
FieldStatsResponse response = client().prepareFieldStats()
.setFields("_uid", "_type")
.get();
assertEquals(response.getAllFieldStats().size(), 1);
assertEquals(response.getAllFieldStats().get("_type").isSearchable(), true);
assertEquals(response.getAllFieldStats().get("_type").isAggregatable(), true);
}
public void testSerialization() throws IOException {
for (Version version : new Version[] {Version.CURRENT, Version.V_5_0_1}){
for (int i = 0; i < 20; i++) {
assertSerialization(randomFieldStats(version.onOrAfter(Version.V_5_2_0)), version);
}
}
}
/**
* Creates random field stats; does not guarantee that {@link FieldStats#maxValue} is greater than {@link FieldStats#minValue}.
*/
public static FieldStats randomFieldStats(boolean withNullMinMax) throws UnknownHostException {
int type = randomInt(5);
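// cases 0-5: Long, Double, Date, Text, Ip (v6), Ip (v4)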
switch (type) {
case 0:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Long(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Long(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), randomLong(), randomLong());
}
case 1:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Double(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Double(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), randomDouble(), randomDouble());
}
case 2:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Date(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Date(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(), Joda.forPattern("basicDate"),
new Date().getTime(), new Date().getTime());
}
case 3:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Text(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Text(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
new BytesRef(randomAlphaOfLength(10)), new BytesRef(randomAlphaOfLength(20)));
}
case 4:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
InetAddress.getByName("::1"), InetAddress.getByName("::1"));
}
case 5:
if (withNullMinMax && randomBoolean()) {
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean());
} else {
return new FieldStats.Ip(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
randomNonNegativeLong(), randomBoolean(), randomBoolean(),
InetAddress.getByName("1.2.3.4"), InetAddress.getByName("1.2.3.4"));
}
default:
throw new IllegalArgumentException("Invalid type");
}
}
private void assertSerialization(FieldStats stats, Version version) throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(version);
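// Round-trip: serialize with the target wire version, then read back with the same version.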
stats.writeTo(output);
output.flush();
StreamInput input = output.bytes().streamInput();
input.setVersion(version);
FieldStats deserializedStats = FieldStats.readFrom(input);
assertEquals(stats, deserializedStats);
assertEquals(stats.hashCode(), deserializedStats.hashCode());
}
}

View File

@ -23,7 +23,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@ -32,7 +31,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
@ -134,43 +132,6 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
assertEquals(10/ft.getScalingFactor(), ft.valueForDisplay(10L));
}
public void testStats() throws IOException {
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
ft.setName("scaled_float");
ft.setScalingFactor(0.1 + randomDouble() * 100);
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
try (DirectoryReader reader = DirectoryReader.open(w)) {
assertNull(ft.stats(reader));
}
Document doc = new Document();
doc.add(new StoredField("scaled_float", -1));
w.addDocument(doc);
try (DirectoryReader reader = DirectoryReader.open(w)) {
// field exists, but has no point values
FieldStats<?> stats = ft.stats(reader);
assertFalse(stats.hasMinMax());
assertNull(stats.getMinValue());
assertNull(stats.getMaxValue());
}
LongPoint point = new LongPoint("scaled_float", -1);
doc.add(point);
w.addDocument(doc);
point.setLongValue(10);
w.addDocument(doc);
try (DirectoryReader reader = DirectoryReader.open(w)) {
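// The points hold the scaled long values; stats() divides them back by the scaling factor.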
FieldStats<?> stats = ft.stats(reader);
assertEquals(-1/ft.getScalingFactor(), stats.getMinValue());
assertEquals(10/ft.getScalingFactor(), stats.getMaxValue());
assertEquals(3, stats.getMaxDoc());
}
w.deleteAll();
try (DirectoryReader reader = DirectoryReader.open(w)) {
assertNull(ft.stats(reader));
}
IOUtils.close(w, dir);
}
public void testFieldData() throws IOException {
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
ft.setScalingFactor(0.1 + randomDouble() * 100);

View File

@ -1,49 +0,0 @@
{
"field_stats": {
"documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-stats.html",
"methods": ["GET", "POST"],
"url": {
"path": "/_field_stats",
"paths": [
"/_field_stats",
"/{index}/_field_stats"
],
"parts": {
"index": {
"type" : "list",
"description" : "A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices"
}
},
"params": {
"fields": {
"type" : "list",
"description" : "A comma-separated list of fields for to get field statistics for (min value, max value, and more)"
},
"level": {
"type" : "enum",
"options" : ["indices", "cluster"],
"default" : "cluster",
"description" : "Defines if field stats should be returned on a per index level or on a cluster wide level"
},
"ignore_unavailable": {
"type" : "boolean",
"description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"
},
"allow_no_indices": {
"type" : "boolean",
"description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)"
},
"expand_wildcards": {
"type" : "enum",
"options" : ["open","closed","none","all"],
"default" : "open",
"description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
}
}
},
"body": {
"description": "Field json objects containing the name and optionally a range to filter out indices result, that have results outside the defined bounds",
"required": false
}
}
}
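For reference, the index_constraints body above maps onto IndexConstraint in the removed transport client; a minimal sketch using the builder API seen in the tests above (the field name is illustrative):

FieldStatsResponse response = client().prepareFieldStats()
.setFields("number")
.setIndexContraints(new IndexConstraint("number", MIN, GTE, "100"))
.setLevel("indices")
.get();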

View File

@ -1,319 +0,0 @@
setup:
- do:
indices.create:
index: test_1
body:
mappings:
test:
properties:
foo:
type: text
number:
type: long
bar:
type: long
geo:
type: geo_point
geo_shape:
type: geo_shape
tree: quadtree
precision: 1m
- do:
indices.create:
index: test_2
body:
mappings:
test:
properties:
foo:
type: text
number:
type: long
bar:
type: text
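# bar is mapped as long in test_1 but text in test_2, which the conflicts test below relies on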
geo:
type: geo_point
index: no
geo_shape:
type: geo_shape
tree: quadtree
precision: 1m
- do:
index:
index: test_1
type: test
id: id_1
body: { foo: "bar", number: 123, bar: 123, geo: { lat: 48.858093, lon: 2.294694} }
- do:
index:
index: test_2
type: test
id: id_10
body: { foo: "babar", number: 456, bar: "123", geo: { lat: 48.858093, lon: 2.294694}, geo_shape: {type: "linestring", coordinates : [[-77.03653, 38.897676], [-77.009051, 38.889939]] } }
- do:
indices.refresh: {}
---
"Basic field stats":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [foo, number]
- match: { indices._all.fields.foo.max_doc: 2 }
- match: { indices._all.fields.foo.doc_count: 2 }
- match: { indices._all.fields.foo.min_value: "babar" }
- match: { indices._all.fields.foo.max_value: "bar" }
- match: { indices._all.fields.foo.type: "string" }
- is_false: indices._all.fields.foo.min_value_as_string
- is_false: indices._all.fields.foo.max_value_as_string
- match: { indices._all.fields.foo.searchable: true }
- match: { indices._all.fields.foo.aggregatable: false }
- match: { indices._all.fields.number.max_doc: 2 }
- match: { indices._all.fields.number.doc_count: 2 }
- match: { indices._all.fields.number.searchable: true }
- match: { indices._all.fields.number.aggregatable: true }
- match: { indices._all.fields.number.min_value: 123 }
- match: { indices._all.fields.number.min_value_as_string: "123" }
- match: { indices._all.fields.number.max_value: 456 }
- match: { indices._all.fields.number.max_value_as_string: "456" }
- match: { indices._all.fields.number.type: "integer" }
- is_false: conflicts
---
"Geo field stats":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [geo, geo_shape]
- match: { indices._all.fields.geo.type: "geo_point" }
- match: { indices._all.fields.geo.max_doc: 2 }
- match: { indices._all.fields.geo.doc_count: -1 }
- match: { indices._all.fields.geo.searchable: true }
- match: { indices._all.fields.geo.aggregatable: true }
- is_false: indices._all.fields.geo.min_value
- is_false: indices._all.fields.geo.max_value
- is_false: indices._all.fields.geo.min_value_as_string
- is_false: indices._all.fields.geo.max_value_as_string
- match: { indices._all.fields.geo_shape.type: "string" }
- match: { indices._all.fields.geo_shape.max_doc: 1 }
- match: { indices._all.fields.geo_shape.doc_count: -1 }
- match: { indices._all.fields.geo_shape.searchable: true }
- match: { indices._all.fields.geo_shape.aggregatable: false }
- is_false: indices._all.fields.geo_shape.min_value
- is_false: indices._all.fields.geo_shape.max_value
- is_false: indices._all.fields.geo_shape.min_value_as_string
- is_false: indices._all.fields.geo_shape.max_value_as_string
- is_false: conflicts
---
"Basic field stats with level set to indices":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [foo, number]
level: indices
- match: { indices.test_1.fields.foo.max_doc: 1 }
- match: { indices.test_1.fields.foo.doc_count: 1 }
- match: { indices.test_1.fields.foo.min_value: "bar" }
- match: { indices.test_1.fields.foo.max_value: "bar" }
- is_false: indices.test_1.fields.foo.min_value_as_string
- is_false: indices.test_1.fields.foo.max_value_as_string
- match: { indices.test_1.fields.foo.searchable: true }
- match: { indices.test_1.fields.foo.aggregatable: false }
- match: { indices.test_1.fields.foo.type: "string" }
- match: { indices.test_1.fields.number.max_doc: 1 }
- match: { indices.test_1.fields.number.doc_count: 1 }
- match: { indices.test_1.fields.number.searchable: true }
- match: { indices.test_1.fields.number.aggregatable: true }
- match: { indices.test_1.fields.number.min_value: 123 }
- match: { indices.test_1.fields.number.min_value_as_string: "123" }
- match: { indices.test_1.fields.number.max_value: 123 }
- match: { indices.test_1.fields.number.max_value_as_string: "123" }
- match: { indices.test_1.fields.number.type: "integer" }
- match: { indices.test_2.fields.foo.max_doc: 1 }
- match: { indices.test_2.fields.foo.doc_count: 1 }
- match: { indices.test_2.fields.foo.min_value: "babar" }
- match: { indices.test_2.fields.foo.max_value: "babar" }
- match: { indices.test_2.fields.foo.type: "string" }
- is_false: indices.test_2.fields.foo.min_value_as_string
- is_false: indices.test_2.fields.foo.max_value_as_string
- match: { indices.test_2.fields.foo.searchable: true }
- match: { indices.test_2.fields.foo.aggregatable: false }
- match: { indices.test_2.fields.foo.type: "string" }
- match: { indices.test_2.fields.number.max_doc: 1 }
- match: { indices.test_2.fields.number.doc_count: 1 }
- match: { indices.test_2.fields.number.searchable: true }
- match: { indices.test_2.fields.number.aggregatable: true }
- match: { indices.test_2.fields.number.min_value: 456 }
- match: { indices.test_2.fields.number.min_value_as_string: "456" }
- match: { indices.test_2.fields.number.max_value: 456 }
- match: { indices.test_2.fields.number.max_value_as_string: "456" }
- match: { indices.test_2.fields.number.type: "integer" }
- is_false: conflicts
---
"Geo field stats with level set to indices":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [geo, geo_shape]
level: indices
- match: { indices.test_1.fields.geo.max_doc: 1 }
- match: { indices.test_1.fields.geo.doc_count: 1 }
- is_true: indices.test_1.fields.geo.min_value
- is_true: indices.test_1.fields.geo.max_value
- is_true: indices.test_1.fields.geo.min_value_as_string
- is_true: indices.test_1.fields.geo.max_value_as_string
- match: { indices.test_1.fields.geo.searchable: true }
- match: { indices.test_1.fields.geo.aggregatable: true }
- match: { indices.test_1.fields.geo.type: "geo_point" }
- is_true: indices.test_2.fields.geo
- match: { indices.test_2.fields.geo_shape.max_doc: 1 }
- match: { indices.test_2.fields.geo_shape.doc_count: -1 }
- is_false: indices.test_2.fields.geo_shape.min_value
- is_false: indices.test_2.fields.geo_shape.max_value
- is_false: indices.test_2.fields.geo_shape.min_value_as_string
- is_false: indices.test_2.fields.geo_shape.max_value_as_string
- match: { indices.test_2.fields.geo_shape.searchable: true }
- match: { indices.test_2.fields.geo_shape.aggregatable: false }
- match: { indices.test_2.fields.geo_shape.type: "string" }
- is_false: conflicts
---
"Geopoint field stats":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [geo]
level: indices
- match: { indices.test_2.fields.geo.max_doc: 1 }
- match: { indices.test_2.fields.geo.doc_count: -1 }
- is_false: indices.test_2.fields.geo.min_value
- is_false: indices.test_2.fields.geo.max_value
- match: { indices.test_2.fields.geo.searchable: false }
- match: { indices.test_2.fields.geo.aggregatable: true }
- match: { indices.test_2.fields.geo.type: "geo_point" }
- is_true: indices.test_2.fields.geo
- is_false: conflicts
---
"Field stats with filtering":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
level: indices
index: test_1
body: { fields: ["foo"], index_constraints: { number: { min_value : { gte: 100 } } }}
- match: { indices.test_1.fields.foo.max_doc: 1 }
- match: { indices.test_1.fields.foo.doc_count: 1 }
- match: { indices.test_1.fields.foo.searchable: true }
- match: { indices.test_1.fields.foo.aggregatable: false }
- match: { indices.test_1.fields.foo.min_value: "bar" }
- match: { indices.test_1.fields.foo.max_value: "bar" }
- match: { indices.test_1.fields.foo.type: "string" }
- is_false: indices.test_1.fields.number
- is_false: conflicts
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
level: indices
index: test_1
body: { fields: ["foo"], index_constraints : { number: { min_value : { gte: 200} } }}
- is_false: indices.test_1
---
"Field stats both source and fields":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
catch: request
field_stats:
index: test_1
fields: ["foo"]
body: { fields: ["foo"]}
---
"Field stats with conflicts":
- skip:
version: " - 5.3.99"
reason: Deprecation was added in 5.4.0
features: warnings
- do:
warnings:
- "[_field_stats] endpoint is deprecated! Use [_field_caps] instead or run a min/max aggregations on the desired fields."
field_stats:
fields: [foo, number, bar]
- match: { indices._all.fields.foo.max_doc: 2 }
- match: { indices._all.fields.foo.doc_count: 2 }
- match: { indices._all.fields.foo.min_value: "babar" }
- match: { indices._all.fields.foo.max_value: "bar" }
- match: { indices._all.fields.foo.searchable: true }
- match: { indices._all.fields.foo.aggregatable: false }
- match: { indices._all.fields.foo.type: "string" }
- match: { indices._all.fields.number.max_doc: 2 }
- match: { indices._all.fields.number.doc_count: 2 }
- match: { indices._all.fields.number.searchable: true }
- match: { indices._all.fields.number.aggregatable: true }
- match: { indices._all.fields.number.min_value: 123 }
- match: { indices._all.fields.number.min_value_as_string: "123" }
- match: { indices._all.fields.number.max_value: 456 }
- match: { indices._all.fields.number.max_value_as_string: "456" }
- match: { indices._all.fields.number.type: "integer" }
- match: { conflicts.bar: "Field [bar] of type [integer] conflicts with existing field of type [string] in other index." }
- is_false: indices._all.fields.bar