Revert "Added normalisation to Derivative Reducer"
This reverts commit 48a94a41df.
parent 48a94a41df
commit 306d94adb9
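In short, this revert removes the optional units setting from the derivative reducer: the units(String) and units(DateHistogramInterval) builder methods, the parser logic that resolved a units string to milliseconds, the xAxisUnits field plumbed through DerivativeReducer and its Factory (including stream serialisation), and the tests for normalised output. The following is a minimal sketch of the two behaviours this commit toggles between; the names are illustrative, not the reducer's real API, and only the arithmetic mirrors the reduce() loop in the diff below:

    // Sketch only: illustrative names, not DerivativeReducer's real API.
    final class DerivativeSketch {

        // Post-revert: the derivative is the raw difference between
        // consecutive bucket values.
        static double plainDerivative(double lastValue, double thisValue) {
            return thisValue - lastValue;
        }

        // Reverted behaviour: the difference divided by the bucket-key
        // distance expressed in xAxisUnits (e.g. "day" -> 86400000 ms).
        static double normalisedDerivative(double lastValue, double thisValue,
                                           long lastKey, long thisKey, long xAxisUnits) {
            double xDiff = (double) (thisKey - lastKey) / xAxisUnits;
            return (thisValue - lastValue) / xDiff;
        }

        public static void main(String[] args) {
            long day = 86_400_000L;           // units("day") resolved to millis
            long jan = 0L, feb = 31 * day;    // monthly buckets, 31 days apart
            System.out.println(plainDerivative(1, 2));                     // 1.0
            System.out.println(normalisedDerivative(1, 2, jan, feb, day)); // ~0.0323 (1/31)
        }
    }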
DerivativeBuilder.java

@@ -20,17 +20,16 @@
 package org.elasticsearch.search.aggregations.reducers.derivative;
 
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.search.aggregations.reducers.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.reducers.ReducerBuilder;
 
 import java.io.IOException;
 
+import static org.elasticsearch.search.aggregations.reducers.BucketHelpers.GapPolicy;
+
 public class DerivativeBuilder extends ReducerBuilder<DerivativeBuilder> {
 
     private String format;
     private GapPolicy gapPolicy;
-    private String units;
 
     public DerivativeBuilder(String name) {
         super(name, DerivativeReducer.TYPE.name());
@@ -46,16 +45,6 @@ public class DerivativeBuilder extends ReducerBuilder<DerivativeBuilder> {
         return this;
     }
 
-    public DerivativeBuilder units(String units) {
-        this.units = units;
-        return this;
-    }
-
-    public DerivativeBuilder units(DateHistogramInterval units) {
-        this.units = units.toString();
-        return this;
-    }
-
     @Override
     protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
         if (format != null) {
@@ -64,9 +53,6 @@ public class DerivativeBuilder extends ReducerBuilder<DerivativeBuilder> {
         if (gapPolicy != null) {
             builder.field(DerivativeParser.GAP_POLICY.getPreferredName(), gapPolicy.getName());
         }
-        if (units != null) {
-            builder.field(DerivativeParser.UNITS.getPreferredName(), units);
-        }
         return builder;
     }
 
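For reference, the builder calls that disappear with this revert, as exercised by the tests further down (derivative(...) is the static helper from ReducerBuilders; this snippet is an illustrative sketch, not code taken from the diff):

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    import static org.elasticsearch.search.aggregations.reducers.ReducerBuilders.derivative;

    class BuilderUsageSketch {
        void beforeRevert() {
            // Both units() flavours existed before the revert:
            derivative("deriv").setBucketsPaths("_count").units(DateHistogramInterval.DAY);
            derivative("deriv").setBucketsPaths("_count").units("1");
        }

        void afterRevert() {
            // Afterwards only format and gap_policy remain configurable:
            derivative("deriv").setBucketsPaths("_count");
        }
    }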
DerivativeParser.java

@@ -19,15 +19,9 @@
 
 package org.elasticsearch.search.aggregations.reducers.derivative;
 
-import com.google.common.collect.ImmutableMap;
-
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.collect.MapBuilder;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.SearchParseException;
-import org.elasticsearch.search.aggregations.reducers.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.reducers.Reducer;
 import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
 import org.elasticsearch.search.aggregations.support.format.ValueFormat;
@@ -38,23 +32,12 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.elasticsearch.search.aggregations.reducers.BucketHelpers.GapPolicy;
+
 public class DerivativeParser implements Reducer.Parser {
 
     public static final ParseField FORMAT = new ParseField("format");
     public static final ParseField GAP_POLICY = new ParseField("gap_policy");
-    public static final ParseField UNITS = new ParseField("units");
-
-    private final ImmutableMap<String, DateTimeUnit> dateFieldUnits;
-
-    public DerivativeParser() {
-        dateFieldUnits = MapBuilder.<String, DateTimeUnit> newMapBuilder().put("year", DateTimeUnit.YEAR_OF_CENTURY)
-                .put("1y", DateTimeUnit.YEAR_OF_CENTURY).put("quarter", DateTimeUnit.QUARTER).put("1q", DateTimeUnit.QUARTER)
-                .put("month", DateTimeUnit.MONTH_OF_YEAR).put("1M", DateTimeUnit.MONTH_OF_YEAR).put("week", DateTimeUnit.WEEK_OF_WEEKYEAR)
-                .put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR).put("day", DateTimeUnit.DAY_OF_MONTH).put("1d", DateTimeUnit.DAY_OF_MONTH)
-                .put("hour", DateTimeUnit.HOUR_OF_DAY).put("1h", DateTimeUnit.HOUR_OF_DAY).put("minute", DateTimeUnit.MINUTES_OF_HOUR)
-                .put("1m", DateTimeUnit.MINUTES_OF_HOUR).put("second", DateTimeUnit.SECOND_OF_MINUTE)
-                .put("1s", DateTimeUnit.SECOND_OF_MINUTE).immutableMap();
-    }
 
     @Override
     public String type() {
@@ -67,7 +50,6 @@ public class DerivativeParser implements Reducer.Parser {
         String currentFieldName = null;
         String[] bucketsPaths = null;
         String format = null;
-        String units = null;
        GapPolicy gapPolicy = GapPolicy.IGNORE;
 
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -80,8 +62,6 @@ public class DerivativeParser implements Reducer.Parser {
                    bucketsPaths = new String[] { parser.text() };
                } else if (GAP_POLICY.match(currentFieldName)) {
                    gapPolicy = GapPolicy.parse(context, parser.text());
-                } else if (UNITS.match(currentFieldName)) {
-                    units = parser.text();
                } else {
                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + reducerName + "]: ["
                            + currentFieldName + "].");
@@ -113,20 +93,7 @@ public class DerivativeParser implements Reducer.Parser {
            formatter = ValueFormat.Patternable.Number.format(format).formatter();
        }
 
-        long xAxisUnits = -1;
-        if (units != null) {
-            DateTimeUnit dateTimeUnit = dateFieldUnits.get(units);
-            if (dateTimeUnit != null) {
-                xAxisUnits = dateTimeUnit.field().getDurationField().getUnitMillis();
-            } else {
-                TimeValue timeValue = TimeValue.parseTimeValue(units, null);
-                if (timeValue != null) {
-                    xAxisUnits = timeValue.getMillis();
-                }
-            }
-        }
-
-        return new DerivativeReducer.Factory(reducerName, bucketsPaths, formatter, gapPolicy, xAxisUnits);
+        return new DerivativeReducer.Factory(reducerName, bucketsPaths, formatter, gapPolicy);
    }
 
 }
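The removed parser block resolved units in two steps: a lookup in the dateFieldUnits map (calendar strings such as "day" or "1w" map to a DateTimeUnit whose duration field supplies the unit's length in milliseconds), then a fallback to TimeValue.parseTimeValue for plain time strings. So units("day") yields xAxisUnits = 86400000, while the units("1") used by the removed histogram test falls through to the TimeValue path and, assuming a bare number parses as milliseconds (TimeValue's behaviour at the time), yields xAxisUnits = 1. With the test's histogram interval of 5 (implied by the / 5 in the removed assertions), the key delta of 5 divided by 1 gives xDiff = 5, i.e. every derivative is divided by 5.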
DerivativeReducer.java

@@ -25,7 +25,6 @@ import org.elasticsearch.ElasticsearchIllegalStateException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
@@ -40,7 +39,6 @@ import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
 import org.elasticsearch.search.aggregations.reducers.ReducerStreams;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
-import org.joda.time.DateTime;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -68,17 +66,15 @@ public class DerivativeReducer extends Reducer {
 
     private ValueFormatter formatter;
     private GapPolicy gapPolicy;
-    private long xAxisUnits;
 
     public DerivativeReducer() {
     }
 
-    public DerivativeReducer(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, GapPolicy gapPolicy, long xAxisUnits,
+    public DerivativeReducer(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, GapPolicy gapPolicy,
             Map<String, Object> metadata) {
         super(name, bucketsPaths, metadata);
         this.formatter = formatter;
         this.gapPolicy = gapPolicy;
-        this.xAxisUnits = xAxisUnits;
     }
 
     @Override
@@ -93,74 +89,51 @@ public class DerivativeReducer extends Reducer {
         InternalHistogram.Factory<? extends InternalHistogram.Bucket> factory = histo.getFactory();
 
         List newBuckets = new ArrayList<>();
-        Long lastBucketKey = null;
         Double lastBucketValue = null;
         for (InternalHistogram.Bucket bucket : buckets) {
-            Long thisBucketKey = resolveBucketKeyAsLong(bucket);
             Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], gapPolicy);
             if (lastBucketValue != null) {
-                double gradient = thisBucketValue - lastBucketValue;
-                if (xAxisUnits != -1) {
-                    double xDiff = (thisBucketKey - lastBucketKey) / xAxisUnits;
-                    gradient = gradient / xDiff;
-                }
-                List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(),
-                        AGGREGATION_TRANFORM_FUNCTION));
-                aggs.add(new InternalSimpleValue(name(), gradient, formatter, new ArrayList<Reducer>(), metaData()));
+                double diff = thisBucketValue - lastBucketValue;
+                List<InternalAggregation> aggs = new ArrayList<>(Lists.transform(bucket.getAggregations().asList(), AGGREGATION_TRANFORM_FUNCTION));
+                aggs.add(new InternalSimpleValue(name(), diff, formatter, new ArrayList<Reducer>(), metaData()));
                 InternalHistogram.Bucket newBucket = factory.createBucket(bucket.getKey(), bucket.getDocCount(), new InternalAggregations(
                         aggs), bucket.getKeyed(), bucket.getFormatter());
                 newBuckets.add(newBucket);
             } else {
                 newBuckets.add(bucket);
             }
-            lastBucketKey = thisBucketKey;
             lastBucketValue = thisBucketValue;
         }
         return factory.create(newBuckets, histo);
     }
 
-    private Long resolveBucketKeyAsLong(InternalHistogram.Bucket bucket) {
-        Object key = bucket.getKey();
-        if (key instanceof DateTime) {
-            return ((DateTime) key).getMillis();
-        } else if (key instanceof Number) {
-            return ((Number) key).longValue();
-        } else {
-            throw new AggregationExecutionException("Bucket keys must be either a Number or a DateTime for aggregation " + name()
-                    + ". Found bucket with key " + key);
-        }
-    }
-
     @Override
     public void doReadFrom(StreamInput in) throws IOException {
         formatter = ValueFormatterStreams.readOptional(in);
         gapPolicy = GapPolicy.readFrom(in);
-        xAxisUnits = in.readLong();
     }
 
     @Override
     public void doWriteTo(StreamOutput out) throws IOException {
         ValueFormatterStreams.writeOptional(formatter, out);
         gapPolicy.writeTo(out);
-        out.writeLong(xAxisUnits);
     }
 
     public static class Factory extends ReducerFactory {
 
         private final ValueFormatter formatter;
         private GapPolicy gapPolicy;
-        private long xAxisUnits;
 
-        public Factory(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, GapPolicy gapPolicy, long xAxisUnits) {
+        public Factory(String name, String[] bucketsPaths, @Nullable ValueFormatter formatter, GapPolicy gapPolicy) {
             super(name, TYPE.name(), bucketsPaths);
             this.formatter = formatter;
             this.gapPolicy = gapPolicy;
-            this.xAxisUnits = xAxisUnits;
         }
 
         @Override
         protected Reducer createInternal(Map<String, Object> metaData) throws IOException {
-            return new DerivativeReducer(name, bucketsPaths, formatter, gapPolicy, xAxisUnits, metaData);
+            return new DerivativeReducer(name, bucketsPaths, formatter, gapPolicy, metaData);
         }
 
         @Override
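To make the deleted numbers concrete: with monthly buckets and units(DateHistogramInterval.DAY), xAxisUnits is 86400000; the January and February 2012 keys lie 31 days apart, so a doc-count increase of 1 is reported as 1/31, roughly 0.0323 per day, and February to March (2012 is a leap year, so 29 days) as 1/29, exactly the closeTo(1d / 31d, ...) and closeTo(1d / 29d, ...) assertions removed from DateDerivativeTests below. Note also that doReadFrom/doWriteTo no longer carry the xAxisUnits long, so this revert changes the reducer's stream format as well.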
DateDerivativeTests.java

@@ -45,7 +45,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHist
 import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
 import static org.elasticsearch.search.aggregations.reducers.ReducerBuilders.derivative;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.core.IsNull.notNullValue;
@@ -148,50 +147,6 @@ public class DateDerivativeTests extends ElasticsearchIntegrationTest {
         assertThat(docCountDeriv.value(), equalTo(1d));
     }
 
-    @Test
-    public void singleValuedField_normalised() throws Exception {
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH).minDocCount(0)
-                                .subAggregation(derivative("deriv").setBucketsPaths("_count").units(DateHistogramInterval.DAY))).execute()
-                .actionGet();
-
-        assertSearchResponse(response);
-
-        InternalHistogram deriv = response.getAggregations().get("histo");
-        assertThat(deriv, notNullValue());
-        assertThat(deriv.getName(), equalTo("histo"));
-        List<? extends Bucket> buckets = deriv.getBuckets();
-        assertThat(buckets.size(), equalTo(3));
-
-        DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
-        Histogram.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((DateTime) bucket.getKey(), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(1l));
-        SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
-        assertThat(docCountDeriv, nullValue());
-
-        key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((DateTime) bucket.getKey(), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(2l));
-        docCountDeriv = bucket.getAggregations().get("deriv");
-        assertThat(docCountDeriv, notNullValue());
-        assertThat(docCountDeriv.value(), closeTo(1d / 31d, 0.00001));
-
-        key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((DateTime) bucket.getKey(), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(3l));
-        docCountDeriv = bucket.getAggregations().get("deriv");
-        assertThat(docCountDeriv, notNullValue());
-        assertThat(docCountDeriv.value(), closeTo(1d / 29d, 0.00001));
-    }
-
     @Test
     public void singleValuedField_WithSubAggregation() throws Exception {
         SearchResponse response = client()
DerivativeTests.java

@@ -43,7 +43,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
 import static org.elasticsearch.search.aggregations.reducers.ReducerBuilders.derivative;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.core.IsNull.notNullValue;
 import static org.hamcrest.core.IsNull.nullValue;
@@ -197,47 +196,6 @@ public class DerivativeTests extends ElasticsearchIntegrationTest {
         }
     }
 
-    /**
-     * test first and second derivative on the sing
-     */
-    @Test
-    public void singleValuedField_normalised() {
-
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).minDocCount(0)
-                                .subAggregation(derivative("deriv").setBucketsPaths("_count").units("1"))
-                                .subAggregation(derivative("2nd_deriv").setBucketsPaths("deriv"))).execute().actionGet();
-
-        assertSearchResponse(response);
-
-        InternalHistogram<Bucket> deriv = response.getAggregations().get("histo");
-        assertThat(deriv, notNullValue());
-        assertThat(deriv.getName(), equalTo("histo"));
-        List<? extends Bucket> buckets = deriv.getBuckets();
-        assertThat(buckets.size(), equalTo(numValueBuckets));
-
-        for (int i = 0; i < numValueBuckets; ++i) {
-            Histogram.Bucket bucket = buckets.get(i);
-            checkBucketKeyAndDocCount("Bucket " + i, bucket, i * interval, valueCounts[i]);
-            SimpleValue docCountDeriv = bucket.getAggregations().get("deriv");
-            if (i > 0) {
-                assertThat(docCountDeriv, notNullValue());
-                assertThat(docCountDeriv.value(), closeTo((double) (firstDerivValueCounts[i - 1]) / 5, 0.00001));
-            } else {
-                assertThat(docCountDeriv, nullValue());
-            }
-            SimpleValue docCount2ndDeriv = bucket.getAggregations().get("2nd_deriv");
-            if (i > 1) {
-                assertThat(docCount2ndDeriv, notNullValue());
-                assertThat(docCount2ndDeriv.value(), closeTo((double) (secondDerivValueCounts[i - 2]) / 5, 0.00001));
-            } else {
-                assertThat(docCount2ndDeriv, nullValue());
-            }
-        }
-    }
-
     @Test
     public void singleValuedField_WithSubAggregation() throws Exception {
         SearchResponse response = client()