Aggregations: Add 'offset' option to date_histogram, replacing 'pre_offset' and 'post_offset'

Add an 'offset' option to 'date_histogram', replacing and simplifying the previous 'pre_offset' and 'post_offset' options.
This change is part of a larger clean-up task for `date_histogram` from issue #9062.
Christoph Büscher 2015-02-03 14:06:50 +01:00
parent 93df178469
commit d2f852a274
10 changed files with 155 additions and 234 deletions

View File

@@ -121,6 +121,10 @@ to all bucket aggregations:
* All other `getKeyAsX()` methods have been removed.
* The `getBucketAsKey(String)` methods have been removed on all aggregations except the `filters` and `terms` aggregations.
The `histogram` and the `date_histogram` aggregations now support a simplified `offset` option that replaces the previous `pre_offset` and
`post_offset` rounding options. Instead of having to specify two separate offset shifts of the underlying buckets, the `offset` option
moves the bucket boundaries in a positive or negative direction, depending on its argument.
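For illustration, a minimal migration sketch using the Java builder API from this commit (the import paths and the old pre/post combination are assumptions, not part of the commit):

import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;          // path assumed
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;             // path assumed
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;            // path assumed

class OffsetMigrationSketch {
    // Old style (removed): two separate shifts, e.g. .preOffset("-6h").postOffset("6h").
    // New style: a single offset that moves the bucket boundaries themselves.
    static DateHistogramBuilder dailyFromSixAm() {
        return dateHistogram("by_day")
                .field("date")
                .offset("6h") // daily buckets now run 06:00 to 06:00
                .interval(DateHistogramInterval.DAY);
    }
}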
=== Terms filter lookup caching
The terms filter lookup mechanism does not support the `cache` option anymore

View File

@@ -72,10 +72,14 @@ set `pre_zone_adjust_large_interval` to `true`, which will apply the same conver
example, to day and above intervals (it can be set regardless of the interval, but only kick in when using day and
higher intervals).
==== Pre/Post Offset
==== Offset
Specific offsets can be provided for pre rounding and post rounding. The `pre_offset` for pre rounding, and
`post_offset` for post rounding. The format is the date time format (`1h`, `1d`, etc...).
The `offset` option can be provided to shift the date bucket interval boundaries; it is applied after any other shifts caused by
time zones. This makes it possible, for example, for daily buckets to go from 6AM to 6AM the next day instead of starting at 12AM,
or for monthly buckets to go from the 10th of the month to the 10th of the next month instead of the 1st.
The `offset` option accepts positive or negative time durations such as "1h" for an hour or "1M" for a month. See <<time-units>> for more
possible time duration options.
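For instance, monthly buckets that start on the 10th could be requested like this (a hedged sketch using the Java builder from this commit; imports and surrounding class as in the earlier sketch, and the MONTH constant is an assumption):

dateHistogram("by_month")
        .field("date")
        .offset("9d")                            // 1st of the month + 9 days = the 10th
        .interval(DateHistogramInterval.MONTH);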
==== Keys

View File

@@ -177,23 +177,21 @@ public abstract class Rounding implements Streamable {
out.writeFloat(factor);
}
}
public static class PrePostRounding extends Rounding {
public static class OffsetRounding extends Rounding {
final static byte ID = 8;
private Rounding rounding;
private long preOffset;
private long postOffset;
private long offset;
PrePostRounding() { // for serialization
OffsetRounding() { // for serialization
}
public PrePostRounding(Rounding intervalRounding, long preOffset, long postOffset) {
public OffsetRounding(Rounding intervalRounding, long offset) {
this.rounding = intervalRounding;
this.preOffset = preOffset;
this.postOffset = postOffset;
this.offset = offset;
}
@Override
@@ -203,41 +201,29 @@ public abstract class Rounding implements Streamable {
@Override
public long roundKey(long value) {
return rounding.roundKey(value + preOffset);
return rounding.roundKey(value - offset);
}
@Override
public long valueForKey(long key) {
return postOffset + rounding.valueForKey(key);
return offset + rounding.valueForKey(key);
}
@Override
public long nextRoundingValue(long value) {
return postOffset + rounding.nextRoundingValue(value - postOffset);
return rounding.nextRoundingValue(value - offset) + offset;
}
@Override
public void readFrom(StreamInput in) throws IOException {
rounding = Rounding.Streams.read(in);
if (in.getVersion().before(Version.V_1_4_0_Beta1)) {
preOffset = in.readVLong();
postOffset = in.readVLong();
} else {
preOffset = in.readLong();
postOffset = in.readLong();
}
offset = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
Rounding.Streams.write(rounding, out);
if (out.getVersion().before(Version.V_1_4_0_Beta1)) {
out.writeVLong(preOffset);
out.writeVLong(postOffset);
} else {
out.writeLong(preOffset);
out.writeLong(postOffset);
}
out.writeLong(offset);
}
}
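The dropped version branch reflects why the serialisation was corrected in 1.4 in the first place: a vlong is an unsigned varint and cannot carry the negative offsets this option allows, so signed offsets need writeLong's fixed eight bytes. A minimal sketch of that encoding constraint (plain Java, not Elasticsearch's StreamOutput):

class VLongSketch {
    // Unsigned varint (vlong) writer: 7 payload bits per byte, high bit = continuation.
    // The assertion documents the constraint that rules vlong out for signed offsets.
    static void writeVLong(java.io.DataOutput out, long value) throws java.io.IOException {
        assert value >= 0 : "vlong encoding assumes a non-negative value";
        while ((value & ~0x7FL) != 0) {
            out.writeByte((byte) ((value & 0x7F) | 0x80));
            value >>>= 7;
        }
        out.writeByte((byte) value);
    }
}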
@@ -260,7 +246,7 @@ public abstract class Rounding implements Streamable {
case TimeZoneRounding.TimeIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.TimeIntervalTimeZoneRounding(); break;
case TimeZoneRounding.DayIntervalTimeZoneRounding.ID: rounding = new TimeZoneRounding.DayIntervalTimeZoneRounding(); break;
case TimeZoneRounding.FactorRounding.ID: rounding = new FactorRounding(); break;
case PrePostRounding.ID: rounding = new PrePostRounding(); break;
case OffsetRounding.ID: rounding = new OffsetRounding(); break;
default: throw new ElasticsearchException("unknown rounding id [" + id + "]");
}
rounding.readFrom(in);
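To see the new arithmetic end to end, here is a self-contained sketch of the three operations composed over a plain interval rounding (Math.floorDiv stands in for Rounding.Interval; the numbers mirror the updated RoundingTests further down):

public final class OffsetRoundingSketch {
    public static void main(String[] args) {
        final long interval = 10, offset = 7, value = 6;

        // roundKey(value) = intervalRounding.roundKey(value - offset)
        long key = Math.floorDiv(value - offset, interval);            // floorDiv(-1, 10) = -1
        // valueForKey(key) = offset + intervalRounding.valueForKey(key)
        long rounded = offset + key * interval;                        // 7 + (-10) = -3
        // nextRoundingValue(v) = intervalRounding.nextRoundingValue(v - offset) + offset
        long next = (Math.floorDiv(rounded - offset, interval) + 1) * interval + offset; // 7

        System.out.println(key + ", " + rounded + ", " + next);        // prints: -1, -3, 7
    }
}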

View File

@@ -51,8 +51,7 @@ public abstract class TimeZoneRounding extends Rounding {
private float factor = 1.0f;
private long preOffset;
private long postOffset;
private long offset;
private boolean preZoneAdjustLargeInterval = false;
@@ -81,13 +80,8 @@ public abstract class TimeZoneRounding extends Rounding {
return this;
}
public Builder preOffset(long preOffset) {
this.preOffset = preOffset;
return this;
}
public Builder postOffset(long postOffset) {
this.postOffset = postOffset;
public Builder offset(long offset) {
this.offset = offset;
return this;
}
@@ -115,8 +109,8 @@ public abstract class TimeZoneRounding extends Rounding {
timeZoneRounding = new DayIntervalTimeZoneRounding(interval, preTz, postTz);
}
}
if (preOffset != 0 || postOffset != 0) {
timeZoneRounding = new PrePostRounding(timeZoneRounding, preOffset, postOffset);
if (offset != 0) {
timeZoneRounding = new OffsetRounding(timeZoneRounding, offset);
}
if (factor != 1.0f) {
timeZoneRounding = new FactorRounding(timeZoneRounding, factor);
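For illustration, the rounding is now composed inside-out: interval rounding first, wrapped by OffsetRounding when an offset is set, and by FactorRounding when the factor differs from 1.0f. A hedged usage sketch (the DateTimeUnit constant and the TimeValue conversion are assumptions, mirroring the updated test further down):

Rounding rounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH)
        .offset(TimeValue.timeValueHours(6).millis())  // +6h, passed in milliseconds
        .build();  // a non-zero offset yields OffsetRounding(dayRounding, 21600000)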

View File

@@ -41,8 +41,7 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
private String postZone;
private boolean preZoneAdjustLargeInterval;
private String format;
private String preOffset;
private String postOffset;
private String offset;
private float factor = 1.0f;
/**
@@ -110,19 +109,12 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
}
/**
* Set the offset to apply prior to computing buckets.
* @param offset sets the offset of time intervals in this histogram
* @return the current builder
*/
public DateHistogramBuilder preOffset(String preOffset) {
this.preOffset = preOffset;
return this;
}
/**
* Set the offset to apply after having computed buckets.
*/
public DateHistogramBuilder postOffset(String postOffset) {
this.postOffset = postOffset;
return this;
public DateHistogramBuilder offset(String offset) {
this.offset = offset;
return this;
}
/**
@@ -206,12 +198,8 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
builder.field("pre_zone_adjust_large_interval", true);
}
if (preOffset != null) {
builder.field("pre_offset", preOffset);
}
if (postOffset != null) {
builder.field("post_offset", postOffset);
if (offset != null) {
builder.field("offset", offset);
}
if (factor != 1.0f) {
@@ -235,5 +223,4 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
return builder;
}
}
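As a usage sketch, the rewritten builder serialises the new field through the toXContent branch above; the exact JSON layout shown in the comment is an assumption:

DateHistogramBuilder builder = dateHistogram("histo")
        .field("date")
        .offset("6h")
        .interval(DateHistogramInterval.DAY);
// expected body, roughly:
// "histo" : { "date_histogram" : { "field" : "date", "interval" : "day", "offset" : "6h" } }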

View File

@@ -88,8 +88,7 @@ public class DateHistogramParser implements Aggregator.Parser {
boolean preZoneAdjustLargeInterval = false;
DateTimeZone preZone = DateTimeZone.UTC;
DateTimeZone postZone = DateTimeZone.UTC;
long preOffset = 0;
long postOffset = 0;
long offset = 0;
XContentParser.Token token;
String currentFieldName = null;
@@ -105,10 +104,8 @@ public class DateHistogramParser implements Aggregator.Parser {
preZone = DateTimeZone.forID(parser.text());
} else if ("post_zone".equals(currentFieldName) || "postZone".equals(currentFieldName)) {
postZone = DateTimeZone.forID(parser.text());
} else if ("pre_offset".equals(currentFieldName) || "preOffset".equals(currentFieldName)) {
preOffset = parseOffset(parser.text());
} else if ("post_offset".equals(currentFieldName) || "postOffset".equals(currentFieldName)) {
postOffset = parseOffset(parser.text());
} else if ("offset".equals(currentFieldName)) {
offset = parseOffset(parser.text());
} else if ("interval".equals(currentFieldName)) {
interval = parser.text();
} else {
@@ -196,8 +193,7 @@ public class DateHistogramParser implements Aggregator.Parser {
Rounding rounding = tzRoundingBuilder
.preZone(preZone).postZone(postZone)
.preZoneAdjustLargeInterval(preZoneAdjustLargeInterval)
.preOffset(preOffset).postOffset(postOffset)
.build();
.offset(offset).build();
return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds,
new InternalDateHistogram.Factory());

View File

@@ -121,7 +121,7 @@ public class HistogramParser implements Aggregator.Parser {
Rounding rounding = new Rounding.Interval(interval);
if (offset != 0) {
rounding = new Rounding.PrePostRounding((Rounding.Interval) rounding, -offset, offset);
rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset);
}
if (extendedBounds != null) {

View File

@@ -22,13 +22,14 @@ package org.elasticsearch.common.rounding;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class RoundingTests extends ElasticsearchTestCase {
/**
* simple testcase to ilustrate how Rounding.Interval works on readable input
* simple test case to illustrate how Rounding.Interval works on readable input
*/
@Test
public void testInterval() {
@@ -60,42 +61,49 @@ public class RoundingTests extends ElasticsearchTestCase {
}
/**
* Simple testcase to ilustrate how Rounding.Pre works on readable input.
* preOffset shifts input value before rounding (so here 24 -> 31)
* postOffset shifts rounded Value after rounding (here 30 -> 35)
* Simple test case to illustrate how Rounding.OffsetRounding works on readable input.
* The offset shifts the input value back before rounding (so here 6 - 7 -> -1),
* then shifts the rounded value forward again by the offset (here -10 -> -3).
*/
@Test
public void testPrePostRounding() {
int interval = 10;
int value = 24;
int preOffset = 7;
int postOffset = 5;
Rounding.PrePostRounding rounding = new Rounding.PrePostRounding(new Rounding.Interval(interval), preOffset, postOffset);
final long key = rounding.roundKey(24);
final long roundedValue = rounding.round(24);
String message = "round(" + value + ", interval=" + interval + ") = " + roundedValue;
assertEquals(3, key);
assertEquals(35, roundedValue);
assertEquals(message, postOffset, roundedValue % interval);
public void testOffsetRounding() {
final long interval = 10;
final long offset = 7;
Rounding.OffsetRounding rounding = new Rounding.OffsetRounding(new Rounding.Interval(interval), offset);
assertEquals(-1, rounding.roundKey(6));
assertEquals(-3, rounding.round(6));
assertEquals(7, rounding.nextRoundingValue(-3));
assertEquals(0, rounding.roundKey(7));
assertEquals(7, rounding.round(7));
assertEquals(17, rounding.nextRoundingValue(7));
assertEquals(0, rounding.roundKey(16));
assertEquals(7, rounding.round(16));
assertEquals(1, rounding.roundKey(17));
assertEquals(17, rounding.round(17));
assertEquals(27, rounding.nextRoundingValue(17));
}
/**
* test OffsetRounding with an internal interval rounding on random inputs
*/
@Test
public void testPrePostRoundingRandom() {
final long interval = randomIntBetween(1, 100);
Rounding.Interval internalRounding = new Rounding.Interval(interval);
final long preRounding = randomIntBetween(-100, 100);
final long postRounding = randomIntBetween(-100, 100);
Rounding.PrePostRounding prePost = new Rounding.PrePostRounding(new Rounding.Interval(interval), preRounding, postRounding);
long safetyMargin = Math.abs(interval) + Math.abs(preRounding) + Math.abs(postRounding); // to prevent range overflow / underflow
public void testOffsetRoundingRandom() {
for (int i = 0; i < 1000; ++i) {
long l = Math.max(randomLong() - safetyMargin, Long.MIN_VALUE + safetyMargin);
final long key = prePost.roundKey(l);
final long r = prePost.round(l);
String message = "round(" + l + ", interval=" + interval + ") = "+ r;
assertEquals(message, internalRounding.round(l+preRounding), r - postRounding);
assertThat(message, r - postRounding, lessThanOrEqualTo(l + preRounding));
assertThat(message, r + interval - postRounding, greaterThan(l + preRounding));
assertEquals(message, r, key*interval + postRounding);
final long interval = randomIntBetween(1, 100);
Rounding.Interval internalRounding = new Rounding.Interval(interval);
final long offset = randomIntBetween(-100, 100);
Rounding.OffsetRounding rounding = new Rounding.OffsetRounding(internalRounding, offset);
long safetyMargin = Math.abs(interval) + Math.abs(offset); // to prevent range overflow
long value = Math.max(randomLong() - safetyMargin, Long.MIN_VALUE + safetyMargin);
final long key = rounding.roundKey(value);
final long key_next = rounding.roundKey(value + interval);
final long r_value = rounding.round(value);
final long nextRoundingValue = rounding.nextRoundingValue(r_value);
assertThat("Rounding should be idempotent", r_value, equalTo(rounding.round(r_value)));
assertThat("Rounded value smaller than unrounded, regardless of offset", r_value - offset, lessThanOrEqualTo(value - offset));
assertThat("Key and next_key should differ by one", key_next - key, equalTo(1L));
assertThat("Rounded value <= value < next interval start", r_value + interval, greaterThan(value));
assertThat("NextRounding value should be interval from rounded value", r_value + interval, equalTo(nextRoundingValue));
}
}
}

View File

@@ -41,7 +41,7 @@ public class TimeZoneRoundingTests extends ElasticsearchTestCase {
assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-09T00:00:00.000Z")));
assertThat(tzRounding.nextRoundingValue(utc("2012-01-09T00:00:00.000Z")), equalTo(utc("2012-01-16T00:00:00.000Z")));
tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).postOffset(-TimeValue.timeValueHours(24).millis()).build();
tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).offset(-TimeValue.timeValueHours(24).millis()).build();
assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-08T00:00:00.000Z")));
assertThat(tzRounding.nextRoundingValue(utc("2012-01-08T00:00:00.000Z")), equalTo(utc("2012-01-15T00:00:00.000Z")));
}

View File

@@ -31,10 +31,12 @@ import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@@ -43,14 +45,16 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;
/**
* The serialisation of pre and post offsets for the date histogram aggregation was corrected in version 1.4 to allow negative offsets and as such the
* serialisation of negative offsets in these tests would break in pre 1.4 versions. These tests are separated from the other DateHistogramTests so the
* The serialisation of offsets for the date histogram aggregation was corrected in version 1.4 to allow negative offsets, so the
* serialisation of negative offsets in these tests would break in pre-1.4 versions. These tests are separated from the other DateHistogramTests so that the
* AssertingLocalTransport for these tests can be restricted to versions 1.4 onwards, while the other tests keep using all versions
*/
@ElasticsearchIntegrationTest.SuiteScopeTest
@ElasticsearchIntegrationTest.ClusterScope(scope=ElasticsearchIntegrationTest.Scope.SUITE)
public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
private static final String DATE_FORMAT = "YY-MM-DD:hh-mm-ss";
private DateTime date(String date) {
return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date);
}
@@ -62,29 +66,36 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
.put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0_Beta1).build();
}
@Before
public void beforeEachTest() throws IOException {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
}
@After
public void afterEachTest() throws IOException {
internalCluster().wipeIndices("idx2");
}
@Test
public void singleValue_WithPreOffset() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
DateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
date = date.plusHours(1);
private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart) throws IOException, InterruptedException, ExecutionException {
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
date = date.plusHours(stepSizeHours);
}
indexRandom(true, reqs);
}
@Test
public void singleValue_WithPositiveOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0);
SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.preOffset("-2h")
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.offset("2h")
.format(DATE_FORMAT)
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
@@ -93,143 +104,74 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2l));
key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2l);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3l);
}
@Test
public void singleValue_WithPreOffset_MinDocCount() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
DateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
date = date.plusHours(1);
}
indexRandom(true, reqs);
public void singleValue_WithNegativeOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0);
SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.preOffset("-2h")
.offset("-2h")
.format(DATE_FORMAT)
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2l);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3l);
}
/**
* Set offset so day buckets start at 6am. Index first 12 hours for two days, with one day gap.
* @throws Exception
*/
@Test
public void singleValue_WithOffset_MinDocCount() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 12, 1, 0);
prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13);
SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.offset("6h")
.minDocCount(0)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.format(DATE_FORMAT)
.interval(DateHistogramInterval.DAY))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
assertThat(response.getHits().getTotalHits(), equalTo(24l));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
assertThat(buckets.size(), equalTo(5));
DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2l));
key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L);
checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L);
}
@Test
public void singleValue_WithPostOffset() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
DateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
date = date.plusHours(6);
}
indexRandom(true, reqs);
SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.postOffset("2d")
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
/**
* @param bucket the bucket to check assertions for
* @param key the expected key
* @param expectedSize the expected size of the bucket
*/
private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(4l));
key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
}
@Test
public void singleValue_WithPostOffset_MinDocCount() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
DateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
date = date.plusHours(6);
}
indexRandom(true, reqs);
SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.postOffset("2d")
.minDocCount(0)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(4l));
key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
assertThat(bucket.getDocCount(), equalTo(expectedSize));
}
}