Aggregations: Add 'offset' option to date_histogram, replacing 'pre_offset' and 'post_offset'
Add an 'offset' option to 'date_histogram', replacing and simplifying the previous 'pre_offset' and 'post_offset' options. This change is part of a larger cleanup task for `date_histogram` from issue #9062.
parent 93df178469 · commit d2f852a274
@@ -121,6 +121,10 @@ to all bucket aggregations:
 * All other `getKeyAsX()` methods have been removed.
 * The `getBucketAsKey(String)` methods have been removed on all aggregations except the `filters` and `terms` aggregations.
 
+The `histogram` and the `date_histogram` aggregation now support a simplified `offset` option that replaces the previous `pre_offset` and
+`post_offset` rounding options. Instead of having to specify two separate offset shifts of the underlying buckets, the `offset` option
+moves the bucket boundaries in positive or negative direction depending on its argument.
+
 === Terms filter lookup caching
 
 The terms filter lookup mechanism does not support the `cache` option anymore

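To make the migration concrete, here is a hedged sketch of the change at the Java builder level (method names are taken from the diffs below; `histogram` is a hypothetical `DateHistogramBuilder` instance, and this is not an exact one-to-one mapping, since `offset` shifts the bucket boundaries themselves rather than pre- and post-shifting values):

```java
// Before this change (API removed below): two separate shifts.
histogram.preOffset("-2h").postOffset("2h");

// After this change: a single shift of the bucket boundaries.
histogram.offset("2h");
```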
@@ -72,10 +72,14 @@ set `pre_zone_adjust_large_interval` to `true`, which will apply the same conver
 example, to day and above intervals (it can be set regardless of the interval, but only kick in when using day and
 higher intervals).
 
-==== Pre/Post Offset
+==== Offset
 
-Specific offsets can be provided for pre rounding and post rounding. The `pre_offset` for pre rounding, and
-`post_offset` for post rounding. The format is the date time format (`1h`, `1d`, etc...).
+The `offset` option can be provided to shift the date bucket interval boundaries after any other shifts caused by
+time zones have been applied. This, for example, makes it possible for daily buckets to go from 6AM to 6AM the next day instead of starting at 12AM,
+or for monthly buckets to go from the 10th of the month to the 10th of the next month instead of the 1st.
+
+The `offset` option accepts positive or negative time durations like "1h" for an hour or "1M" for a month. See <<time-units>> for more
+possible time duration options.
 
 ==== Keys

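The 6AM example above corresponds to the following client-side sketch, mirroring the integration tests at the end of this commit (the `client` instance and index name are assumed; `dateHistogram` is the static aggregation factory used in those tests):

```java
// Day-long buckets that start at 6AM instead of midnight.
SearchResponse response = client.prepareSearch("idx")
        .addAggregation(dateHistogram("by_day")
                .field("date")
                .offset("6h")
                .interval(DateHistogramInterval.DAY))
        .execute().actionGet();
```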
@@ -177,23 +177,21 @@ public abstract class Rounding implements Streamable {
             out.writeFloat(factor);
         }
     }
 
-    public static class PrePostRounding extends Rounding {
+    public static class OffsetRounding extends Rounding {
 
         final static byte ID = 8;
 
         private Rounding rounding;
 
-        private long preOffset;
-        private long postOffset;
+        private long offset;
 
-        PrePostRounding() { // for serialization
+        OffsetRounding() { // for serialization
         }
 
-        public PrePostRounding(Rounding intervalRounding, long preOffset, long postOffset) {
+        public OffsetRounding(Rounding intervalRounding, long offset) {
             this.rounding = intervalRounding;
-            this.preOffset = preOffset;
-            this.postOffset = postOffset;
+            this.offset = offset;
         }
 
         @Override

@@ -203,41 +201,29 @@ public abstract class Rounding implements Streamable {
 
         @Override
         public long roundKey(long value) {
-            return rounding.roundKey(value + preOffset);
+            return rounding.roundKey(value - offset);
         }
 
         @Override
         public long valueForKey(long key) {
-            return postOffset + rounding.valueForKey(key);
+            return offset + rounding.valueForKey(key);
         }
 
         @Override
         public long nextRoundingValue(long value) {
-            return postOffset + rounding.nextRoundingValue(value - postOffset);
+            return rounding.nextRoundingValue(value - offset) + offset;
         }
 
         @Override
         public void readFrom(StreamInput in) throws IOException {
             rounding = Rounding.Streams.read(in);
-            if (in.getVersion().before(Version.V_1_4_0_Beta1)) {
-                preOffset = in.readVLong();
-                postOffset = in.readVLong();
-            } else {
-                preOffset = in.readLong();
-                postOffset = in.readLong();
-            }
+            offset = in.readLong();
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             Rounding.Streams.write(rounding, out);
-            if (out.getVersion().before(Version.V_1_4_0_Beta1)) {
-                out.writeVLong(preOffset);
-                out.writeVLong(postOffset);
-            } else {
-                out.writeLong(preOffset);
-                out.writeLong(postOffset);
-            }
+            out.writeLong(offset);
         }
     }
 

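The arithmetic is easiest to see standalone. A minimal sketch, using `Math.floorDiv` as a stand-in for the inner `Rounding.Interval` (an assumption about its behaviour); the values match the unit test further down in this commit:

```java
public class OffsetRoundingSketch {
    public static void main(String[] args) {
        final long interval = 10, offset = 7;   // buckets ..., [-3, 7), [7, 17), ...
        final long value = 6;
        long key = Math.floorDiv(value - offset, interval); // shift back, then floor: -1
        long rounded = offset + key * interval;             // shift forward again: -3
        long next = rounded + interval;                     // start of the next bucket: 7
        System.out.println(key + ", " + rounded + ", " + next); // prints: -1, -3, 7
    }
}
```

Shifting the input back by `offset` before rounding and forward again afterwards is what lets a single parameter replace the old pre/post pair.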
@@ -260,7 +246,7 @@ public abstract class Rounding implements Streamable {
             case TimeZoneRounding.TimeIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.TimeIntervalTimeZoneRounding(); break;
             case TimeZoneRounding.DayIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.DayIntervalTimeZoneRounding(); break;
             case TimeZoneRounding.FactorRounding.ID: rounding = new FactorRounding(); break;
-            case PrePostRounding.ID: rounding = new PrePostRounding(); break;
+            case OffsetRounding.ID: rounding = new OffsetRounding(); break;
             default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
             }
             rounding.readFrom(in);

@@ -51,8 +51,7 @@ public abstract class TimeZoneRounding extends Rounding {
 
         private float factor = 1.0f;
 
-        private long preOffset;
-        private long postOffset;
+        private long offset;
 
         private boolean preZoneAdjustLargeInterval = false;
 

@@ -81,13 +80,8 @@ public abstract class TimeZoneRounding extends Rounding {
             return this;
         }
 
-        public Builder preOffset(long preOffset) {
-            this.preOffset = preOffset;
-            return this;
-        }
-
-        public Builder postOffset(long postOffset) {
-            this.postOffset = postOffset;
+        public Builder offset(long offset) {
+            this.offset = offset;
             return this;
         }
 

@@ -115,8 +109,8 @@ public abstract class TimeZoneRounding extends Rounding {
                     timeZoneRounding = new DayIntervalTimeZoneRounding(interval, preTz, postTz);
                 }
             }
-            if (preOffset != 0 || postOffset != 0) {
-                timeZoneRounding = new PrePostRounding(timeZoneRounding, preOffset, postOffset);
+            if (offset != 0) {
+                timeZoneRounding = new OffsetRounding(timeZoneRounding, offset);
             }
             if (factor != 1.0f) {
                 timeZoneRounding = new FactorRounding(timeZoneRounding, factor);

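A usage sketch of the simplified builder, taken from the updated TimeZoneRoundingTests further down (the package locations in the imports are assumptions for this branch):

```java
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.rounding.TimeZoneRounding;
import org.elasticsearch.common.unit.TimeValue;

// Weekly rounding shifted back by 24 hours, so weeks run Sunday..Saturday
// instead of the default Monday..Sunday.
Rounding rounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR)
        .offset(-TimeValue.timeValueHours(24).millis())
        .build();
```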
@@ -41,8 +41,7 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
     private String postZone;
     private boolean preZoneAdjustLargeInterval;
     private String format;
-    private String preOffset;
-    private String postOffset;
+    private String offset;
     private float factor = 1.0f;
 
     /**

@@ -110,19 +109,12 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
     }
 
     /**
-     * Set the offset to apply prior to computing buckets.
+     * @param offset sets the offset of time intervals in this histogram
+     * @return the current builder
      */
-    public DateHistogramBuilder preOffset(String preOffset) {
-        this.preOffset = preOffset;
+    public DateHistogramBuilder offset(String offset) {
+        this.offset = offset;
         return this;
     }
 
-    /**
-     * Set the offset to apply after having computed buckets.
-     */
-    public DateHistogramBuilder postOffset(String postOffset) {
-        this.postOffset = postOffset;
-        return this;
-    }
-
     /**

@@ -206,12 +198,8 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
             builder.field("pre_zone_adjust_large_interval", true);
         }
 
-        if (preOffset != null) {
-            builder.field("pre_offset", preOffset);
-        }
-
-        if (postOffset != null) {
-            builder.field("post_offset", postOffset);
+        if (offset != null) {
+            builder.field("offset", offset);
         }
 
         if (factor != 1.0f) {

@@ -235,5 +223,4 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
 
         return builder;
     }
-
 }

@@ -88,8 +88,7 @@ public class DateHistogramParser implements Aggregator.Parser {
         boolean preZoneAdjustLargeInterval = false;
         DateTimeZone preZone = DateTimeZone.UTC;
         DateTimeZone postZone = DateTimeZone.UTC;
-        long preOffset = 0;
-        long postOffset = 0;
+        long offset = 0;
 
         XContentParser.Token token;
         String currentFieldName = null;

@@ -105,10 +104,8 @@ public class DateHistogramParser implements Aggregator.Parser {
                     preZone = DateTimeZone.forID(parser.text());
                 } else if ("post_zone".equals(currentFieldName) || "postZone".equals(currentFieldName)) {
                     postZone = DateTimeZone.forID(parser.text());
-                } else if ("pre_offset".equals(currentFieldName) || "preOffset".equals(currentFieldName)) {
-                    preOffset = parseOffset(parser.text());
-                } else if ("post_offset".equals(currentFieldName) || "postOffset".equals(currentFieldName)) {
-                    postOffset = parseOffset(parser.text());
+                } else if ("offset".equals(currentFieldName)) {
+                    offset = parseOffset(parser.text());
                 } else if ("interval".equals(currentFieldName)) {
                     interval = parser.text();
                 } else {

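In request terms, the parser now recognises a single `offset` key; the old `pre_offset`/`post_offset` keys no longer match any branch. A hypothetical request body, kept as a Java string (only the `offset` and `interval` field names are taken from the parser above):

```java
// Hypothetical date_histogram aggregation source using the new option.
String aggSource = "{\"date_histogram\": {"
        + "\"field\": \"date\","
        + "\"interval\": \"day\","
        + "\"offset\": \"6h\""
        + "}}";
```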
@@ -196,8 +193,7 @@ public class DateHistogramParser implements Aggregator.Parser {
         Rounding rounding = tzRoundingBuilder
                 .preZone(preZone).postZone(postZone)
                 .preZoneAdjustLargeInterval(preZoneAdjustLargeInterval)
-                .preOffset(preOffset).postOffset(postOffset)
-                .build();
+                .offset(offset).build();
 
         return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds,
                 new InternalDateHistogram.Factory());

@@ -121,7 +121,7 @@ public class HistogramParser implements Aggregator.Parser {
 
         Rounding rounding = new Rounding.Interval(interval);
         if (offset != 0) {
-            rounding = new Rounding.PrePostRounding((Rounding.Interval) rounding, -offset, offset);
+            rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset);
         }
 
         if (extendedBounds != null) {

@@ -22,13 +22,14 @@ package org.elasticsearch.common.rounding;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 public class RoundingTests extends ElasticsearchTestCase {
 
     /**
-     * simple testcase to ilustrate how Rounding.Interval works on readable input
+     * simple test case to illustrate how Rounding.Interval works on readable input
      */
     @Test
     public void testInterval() {

@@ -60,42 +61,49 @@ public class RoundingTests extends ElasticsearchTestCase {
     }
 
     /**
-     * Simple testcase to ilustrate how Rounding.Pre works on readable input.
-     * preOffset shifts input value before rounding (so here 24 -> 31)
-     * postOffset shifts rounded Value after rounding (here 30 -> 35)
+     * Simple test case to illustrate how Rounding.Offset works on readable input.
+     * offset shifts input value back before rounding (so here 6 - 7 -> -1)
+     * then shifts rounded Value back (here -10 -> -3)
      */
     @Test
-    public void testPrePostRounding() {
-        int interval = 10;
-        int value = 24;
-        int preOffset = 7;
-        int postOffset = 5;
-        Rounding.PrePostRounding rounding = new Rounding.PrePostRounding(new Rounding.Interval(interval), preOffset, postOffset);
-        final long key = rounding.roundKey(24);
-        final long roundedValue = rounding.round(24);
-        String message = "round(" + value + ", interval=" + interval + ") = " + roundedValue;
-        assertEquals(3, key);
-        assertEquals(35, roundedValue);
-        assertEquals(message, postOffset, roundedValue % interval);
+    public void testOffsetRounding() {
+        final long interval = 10;
+        final long offset = 7;
+        Rounding.OffsetRounding rounding = new Rounding.OffsetRounding(new Rounding.Interval(interval), offset);
+        assertEquals(-1, rounding.roundKey(6));
+        assertEquals(-3, rounding.round(6));
+        assertEquals(7, rounding.nextRoundingValue(-3));
+        assertEquals(0, rounding.roundKey(7));
+        assertEquals(7, rounding.round(7));
+        assertEquals(17, rounding.nextRoundingValue(7));
+        assertEquals(0, rounding.roundKey(16));
+        assertEquals(7, rounding.round(16));
+        assertEquals(1, rounding.roundKey(17));
+        assertEquals(17, rounding.round(17));
+        assertEquals(27, rounding.nextRoundingValue(17));
     }
 
+    /**
+     * test OffsetRounding with an internal interval rounding on random inputs
+     */
     @Test
-    public void testPrePostRoundingRandom() {
-        final long interval = randomIntBetween(1, 100);
-        Rounding.Interval internalRounding = new Rounding.Interval(interval);
-        final long preRounding = randomIntBetween(-100, 100);
-        final long postRounding = randomIntBetween(-100, 100);
-        Rounding.PrePostRounding prePost = new Rounding.PrePostRounding(new Rounding.Interval(interval), preRounding, postRounding);
-        long safetyMargin = Math.abs(interval) + Math.abs(preRounding) + Math.abs(postRounding); // to prevent range overflow / underflow
+    public void testOffsetRoundingRandom() {
         for (int i = 0; i < 1000; ++i) {
-            long l = Math.max(randomLong() - safetyMargin, Long.MIN_VALUE + safetyMargin);
-            final long key = prePost.roundKey(l);
-            final long r = prePost.round(l);
-            String message = "round(" + l + ", interval=" + interval + ") = "+ r;
-            assertEquals(message, internalRounding.round(l+preRounding), r - postRounding);
-            assertThat(message, r - postRounding, lessThanOrEqualTo(l + preRounding));
-            assertThat(message, r + interval - postRounding, greaterThan(l + preRounding));
-            assertEquals(message, r, key*interval + postRounding);
+            final long interval = randomIntBetween(1, 100);
+            Rounding.Interval internalRounding = new Rounding.Interval(interval);
+            final long offset = randomIntBetween(-100, 100);
+            Rounding.OffsetRounding rounding = new Rounding.OffsetRounding(internalRounding, offset);
+            long safetyMargin = Math.abs(interval) + Math.abs(offset); // to prevent range overflow
+            long value = Math.max(randomLong() - safetyMargin, Long.MIN_VALUE + safetyMargin);
+            final long key = rounding.roundKey(value);
+            final long key_next = rounding.roundKey(value + interval);
+            final long r_value = rounding.round(value);
+            final long nextRoundingValue = rounding.nextRoundingValue(r_value);
+            assertThat("Rounding should be idempotent", r_value, equalTo(rounding.round(r_value)));
+            assertThat("Rounded value smaller than unrounded, regardless of offset", r_value - offset, lessThanOrEqualTo(value - offset));
+            assertThat("Key and next_key should differ by one", key_next - key, equalTo(1L));
+            assertThat("Rounded value <= value < next interval start", r_value + interval, greaterThan(value));
+            assertThat("NextRounding value should be interval from rounded value", r_value + interval, equalTo(nextRoundingValue));
         }
     }
 }

@@ -41,7 +41,7 @@ public class TimeZoneRoundingTests extends ElasticsearchTestCase {
         assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-09T00:00:00.000Z")));
         assertThat(tzRounding.nextRoundingValue(utc("2012-01-09T00:00:00.000Z")), equalTo(utc("2012-01-16T00:00:00.000Z")));
 
-        tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).postOffset(-TimeValue.timeValueHours(24).millis()).build();
+        tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).offset(-TimeValue.timeValueHours(24).millis()).build();
         assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-08T00:00:00.000Z")));
         assertThat(tzRounding.nextRoundingValue(utc("2012-01-08T00:00:00.000Z")), equalTo(utc("2012-01-15T00:00:00.000Z")));
     }

@@ -31,10 +31,12 @@ import org.elasticsearch.test.transport.AssertingLocalTransport;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.concurrent.ExecutionException;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

|
@ -43,14 +45,16 @@ import static org.hamcrest.Matchers.equalTo;
|
||||||
import static org.hamcrest.core.IsNull.notNullValue;
|
import static org.hamcrest.core.IsNull.notNullValue;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The serialisation of pre and post offsets for the date histogram aggregation was corrected in version 1.4 to allow negative offsets and as such the
|
* The serialisation of offsets for the date histogram aggregation was corrected in version 1.4 to allow negative offsets and as such the
|
||||||
* serialisation of negative offsets in these tests would break in pre 1.4 versions. These tests are separated from the other DateHistogramTests so the
|
* serialisation of negative offsets in these tests would break in pre 1.4 versions. These tests are separated from the other DateHistogramTests so the
|
||||||
* AssertingLocalTransport for these tests can be set to only use versions 1.4 onwards while keeping the other tests using all versions
|
* AssertingLocalTransport for these tests can be set to only use versions 1.4 onwards while keeping the other tests using all versions
|
||||||
*/
|
*/
|
||||||
@ElasticsearchIntegrationTest.SuiteScopeTest
|
@ElasticsearchIntegrationTest.SuiteScopeTest
|
||||||
@ElasticsearchIntegrationTest.ClusterScope(scope=ElasticsearchIntegrationTest.Scope.SUITE)
|
@ElasticsearchIntegrationTest.ClusterScope(scope=ElasticsearchIntegrationTest.Scope.SUITE)
|
||||||
public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
|
public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
|
||||||
|
|
||||||
|
private static final String DATE_FORMAT = "YY-MM-DD:hh-mm-ss";
|
||||||
|
|
||||||
private DateTime date(String date) {
|
private DateTime date(String date) {
|
||||||
return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date);
|
return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date);
|
||||||
}
|
}
|
||||||
|
@@ -62,29 +66,36 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
                 .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0_Beta1).build();
     }
 
+    @Before
+    public void beforeEachTest() throws IOException {
+        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
+    }
+
     @After
     public void afterEachTest() throws IOException {
         internalCluster().wipeIndices("idx2");
     }
 
-    @Test
-    public void singleValue_WithPreOffset() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(1);
+    private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, InterruptedException, ExecutionException {
+        IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
+        for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
+            reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
+            date = date.plusHours(stepSizeHours);
         }
         indexRandom(true, reqs);
+    }
+
+    @Test
+    public void singleValue_WithPositiveOffset() throws Exception {
+        prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0);
 
         SearchResponse response = client().prepareSearch("idx2")
                 .setQuery(matchAllQuery())
                 .addAggregation(dateHistogram("date_histo")
                         .field("date")
-                        .preOffset("-2h")
-                        .interval(DateHistogramInterval.DAY)
-                        .format("yyyy-MM-dd"))
+                        .offset("2h")
+                        .format(DATE_FORMAT)
+                        .interval(DateHistogramInterval.DAY))
                 .execute().actionGet();
 
         assertThat(response.getHits().getTotalHits(), equalTo(5l));

@@ -93,143 +104,74 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
         List<? extends Histogram.Bucket> buckets = histo.getBuckets();
         assertThat(buckets.size(), equalTo(2));
 
-        DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
-        Histogram.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(2l));
-
-        key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(3l));
+        checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2l);
+        checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3l);
     }
 
     @Test
-    public void singleValue_WithPreOffset_MinDocCount() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(1);
-        }
-        indexRandom(true, reqs);
+    public void singleValue_WithNegativeOffset() throws Exception {
+        prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0);
 
         SearchResponse response = client().prepareSearch("idx2")
                 .setQuery(matchAllQuery())
                 .addAggregation(dateHistogram("date_histo")
                         .field("date")
-                        .preOffset("-2h")
+                        .offset("-2h")
+                        .format(DATE_FORMAT)
+                        .interval(DateHistogramInterval.DAY))
+                .execute().actionGet();
+
+        assertThat(response.getHits().getTotalHits(), equalTo(5l));
+
+        Histogram histo = response.getAggregations().get("date_histo");
+        List<? extends Histogram.Bucket> buckets = histo.getBuckets();
+        assertThat(buckets.size(), equalTo(2));
+
+        checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2l);
+        checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3l);
+    }
+
+    /**
+     * Set offset so day buckets start at 6am. Index first 12 hours for two days, with one day gap.
+     * @throws Exception
+     */
+    @Test
+    public void singleValue_WithOffset_MinDocCount() throws Exception {
+        prepareIndex(date("2014-03-11T00:00:00+00:00"), 12, 1, 0);
+        prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13);
+
+        SearchResponse response = client().prepareSearch("idx2")
+                .setQuery(matchAllQuery())
+                .addAggregation(dateHistogram("date_histo")
+                        .field("date")
+                        .offset("6h")
                         .minDocCount(0)
-                        .interval(DateHistogramInterval.DAY)
-                        .format("yyyy-MM-dd"))
+                        .format(DATE_FORMAT)
+                        .interval(DateHistogramInterval.DAY))
                 .execute().actionGet();
 
-        assertThat(response.getHits().getTotalHits(), equalTo(5l));
+        assertThat(response.getHits().getTotalHits(), equalTo(24l));
 
         Histogram histo = response.getAggregations().get("date_histo");
         List<? extends Histogram.Bucket> buckets = histo.getBuckets();
-        assertThat(buckets.size(), equalTo(2));
+        assertThat(buckets.size(), equalTo(5));
 
-        DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
-        Histogram.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(2l));
-
-        key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(3l));
-    }
-
-    @Test
-    public void singleValue_WithPostOffset() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(6);
-        }
-        indexRandom(true, reqs);
-
-        SearchResponse response = client().prepareSearch("idx2")
-                .setQuery(matchAllQuery())
-                .addAggregation(dateHistogram("date_histo")
-                        .field("date")
-                        .postOffset("2d")
-                        .interval(DateHistogramInterval.DAY)
-                        .format("yyyy-MM-dd"))
-                .execute().actionGet();
-
-        assertThat(response.getHits().getTotalHits(), equalTo(5l));
-
-        Histogram histo = response.getAggregations().get("date_histo");
-        List<? extends Histogram.Bucket> buckets = histo.getBuckets();
-        assertThat(buckets.size(), equalTo(2));
-
-        DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
-        Histogram.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(4l));
-
-        key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(1l));
-    }
-
-    @Test
-    public void singleValue_WithPostOffset_MinDocCount() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(6);
-        }
-        indexRandom(true, reqs);
-
-        SearchResponse response = client().prepareSearch("idx2")
-                .setQuery(matchAllQuery())
-                .addAggregation(dateHistogram("date_histo")
-                        .field("date")
-                        .postOffset("2d")
-                        .minDocCount(0)
-                        .interval(DateHistogramInterval.DAY)
-                        .format("yyyy-MM-dd"))
-                .execute().actionGet();
-
-        assertThat(response.getHits().getTotalHits(), equalTo(5l));
-
-        Histogram histo = response.getAggregations().get("date_histo");
-        List<? extends Histogram.Bucket> buckets = histo.getBuckets();
-        assertThat(buckets.size(), equalTo(2));
-
-        DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
-        Histogram.Bucket bucket = buckets.get(0);
+        checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L);
+        checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L);
+        checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L);
+        checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L);
+        checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L);
+    }
+
+    /**
+     * @param bucket the bucket to check assertions for
+     * @param key the expected key
+     * @param expectedSize the expected size of the bucket
+     */
+    private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) {
         assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
+        assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT)));
         assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(4l));
-
-        key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
-        assertThat(((DateTime) bucket.getKey()), equalTo(key));
-        assertThat(bucket.getDocCount(), equalTo(1l));
+        assertThat(bucket.getDocCount(), equalTo(expectedSize));
     }
 }