Aggregations: Fixes pre and post offset serialisation for histogram aggs
Changes the serialisation of the pre and post offsets to use long instead of vLong so that negative values are supported. In practice the bug only surfaced when minDocCount=0, since the rounding is only serialised in that case. Closes #7312
commit 8550b9e84b (parent f956920acc)
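For background on the fix: a vLong is an unsigned variable-length encoding that emits seven payload bits per byte, so the sign bit of a negative offset is treated as ordinary data and the value balloons to the maximum ten bytes, which compact vLong decoders typically reject or misread. Below is a minimal sketch of that failure mode; it illustrates the general varint scheme, not Elasticsearch's actual StreamOutput code, and the class and method names are invented for the example.

import java.util.Arrays;

public class VLongSketch {
    // Generic unsigned varint encoder: 7 payload bits per byte, low bits
    // first; the high bit of each byte flags that another byte follows.
    static byte[] writeVLong(long i) {
        byte[] buf = new byte[10];
        int pos = 0;
        while ((i & ~0x7FL) != 0) {
            buf[pos++] = (byte) ((i & 0x7FL) | 0x80); // 7 payload bits + continuation bit
            i >>>= 7; // unsigned shift: the sign bit is shifted in as plain data
        }
        buf[pos++] = (byte) i;
        return Arrays.copyOf(buf, pos);
    }

    public static void main(String[] args) {
        System.out.println(writeVLong(7200000L).length);  // 4 bytes: a +2h offset in millis stays compact
        System.out.println(writeVLong(-7200000L).length); // 10 bytes: a -2h offset needs every 7-bit group
    }
}

A fixed eight-byte long, as used from 1.4 onwards, round-trips either sign at a constant cost.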
@@ -19,6 +19,7 @@
 package org.elasticsearch.common.rounding;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
@@ -218,15 +219,25 @@ public abstract class Rounding implements Streamable {
         @Override
         public void readFrom(StreamInput in) throws IOException {
             rounding = Rounding.Streams.read(in);
-            preOffset = in.readVLong();
-            postOffset = in.readVLong();
+            if (in.getVersion().before(Version.V_1_4_0)) {
+                preOffset = in.readVLong();
+                postOffset = in.readVLong();
+            } else {
+                preOffset = in.readLong();
+                postOffset = in.readLong();
+            }
         }
 
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             Rounding.Streams.write(rounding, out);
-            out.writeVLong(preOffset);
-            out.writeVLong(postOffset);
+            if (out.getVersion().before(Version.V_1_4_0)) {
+                out.writeVLong(preOffset);
+                out.writeVLong(postOffset);
+            } else {
+                out.writeLong(preOffset);
+                out.writeLong(postOffset);
+            }
         }
     }
@@ -0,0 +1,208 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Test;

import java.io.IOException;
import java.util.Collection;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.hamcrest.Matchers.equalTo;

/**
 * The serialisation of pre and post offsets for the date histogram aggregation was corrected in
 * version 1.4 to allow negative offsets, so serialising the negative offsets used in these tests
 * would break on pre-1.4 versions. These tests are therefore kept separate from the other
 * DateHistogramTests, so that the AssertingLocalTransport for this suite can be restricted to
 * versions 1.4 onwards while the other tests keep exercising all versions.
 */
@ElasticsearchIntegrationTest.SuiteScopeTest
@ElasticsearchIntegrationTest.ClusterScope(scope=ElasticsearchIntegrationTest.Scope.SUITE)
public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {

    private DateTime date(String date) {
        return DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date);
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return ImmutableSettings.builder()
                .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0).build();
    }

    @After
    public void afterEachTest() throws IOException {
        internalCluster().wipeIndices("idx2");
    }

    @Test
    public void singleValue_WithPreOffset() throws Exception {
        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
        DateTime date = date("2014-03-11T00:00:00+00:00");
        for (int i = 0; i < reqs.length; i++) {
            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
            date = date.plusHours(1);
        }
        indexRandom(true, reqs);

        SearchResponse response = client().prepareSearch("idx2")
                .setQuery(matchAllQuery())
                .addAggregation(dateHistogram("date_histo")
                        .field("date")
                        .preOffset("-2h")
                        .interval(DateHistogram.Interval.DAY)
                        .format("yyyy-MM-dd"))
                .execute().actionGet();

        assertThat(response.getHits().getTotalHits(), equalTo(5l));

        DateHistogram histo = response.getAggregations().get("date_histo");
        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(2));

        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-10");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(2l));

        bucket = histo.getBucketByKey("2014-03-11");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(3l));
    }

    @Test
    public void singleValue_WithPreOffset_MinDocCount() throws Exception {
        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
        DateTime date = date("2014-03-11T00:00:00+00:00");
        for (int i = 0; i < reqs.length; i++) {
            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
            date = date.plusHours(1);
        }
        indexRandom(true, reqs);

        SearchResponse response = client().prepareSearch("idx2")
                .setQuery(matchAllQuery())
                .addAggregation(dateHistogram("date_histo")
                        .field("date")
                        .preOffset("-2h")
                        .minDocCount(0)
                        .interval(DateHistogram.Interval.DAY)
                        .format("yyyy-MM-dd"))
                .execute().actionGet();

        assertThat(response.getHits().getTotalHits(), equalTo(5l));

        DateHistogram histo = response.getAggregations().get("date_histo");
        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(2));

        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-10");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(2l));

        bucket = histo.getBucketByKey("2014-03-11");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(3l));
    }

    @Test
    public void singleValue_WithPostOffset() throws Exception {
        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
        DateTime date = date("2014-03-11T00:00:00+00:00");
        for (int i = 0; i < reqs.length; i++) {
            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
            date = date.plusHours(6);
        }
        indexRandom(true, reqs);

        SearchResponse response = client().prepareSearch("idx2")
                .setQuery(matchAllQuery())
                .addAggregation(dateHistogram("date_histo")
                        .field("date")
                        .postOffset("2d")
                        .interval(DateHistogram.Interval.DAY)
                        .format("yyyy-MM-dd"))
                .execute().actionGet();

        assertThat(response.getHits().getTotalHits(), equalTo(5l));

        DateHistogram histo = response.getAggregations().get("date_histo");
        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(2));

        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-13");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(4l));

        bucket = histo.getBucketByKey("2014-03-14");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(1l));
    }

    @Test
    public void singleValue_WithPostOffset_MinDocCount() throws Exception {
        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
        DateTime date = date("2014-03-11T00:00:00+00:00");
        for (int i = 0; i < reqs.length; i++) {
            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
            date = date.plusHours(6);
        }
        indexRandom(true, reqs);

        SearchResponse response = client().prepareSearch("idx2")
                .setQuery(matchAllQuery())
                .addAggregation(dateHistogram("date_histo")
                        .field("date")
                        .postOffset("2d")
                        .minDocCount(0)
                        .interval(DateHistogram.Interval.DAY)
                        .format("yyyy-MM-dd"))
                .execute().actionGet();

        assertThat(response.getHits().getTotalHits(), equalTo(5l));

        DateHistogram histo = response.getAggregations().get("date_histo");
        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
        assertThat(buckets.size(), equalTo(2));

        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-13");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(4l));

        bucket = histo.getBucketByKey("2014-03-14");
        assertThat(bucket, Matchers.notNullValue());
        assertThat(bucket.getDocCount(), equalTo(1l));
    }
}
@@ -1051,76 +1051,6 @@ public class DateHistogramTests extends ElasticsearchIntegrationTest {
         assertThat(bucket.getDocCount(), equalTo(3l));
     }
 
-    @Test
-    public void singleValue_WithPreOffset() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(1);
-        }
-        indexRandom(true, reqs);
-
-        SearchResponse response = client().prepareSearch("idx2")
-                .setQuery(matchAllQuery())
-                .addAggregation(dateHistogram("date_histo")
-                        .field("date")
-                        .preOffset("-2h")
-                        .interval(DateHistogram.Interval.DAY)
-                        .format("yyyy-MM-dd"))
-                .execute().actionGet();
-
-        assertThat(response.getHits().getTotalHits(), equalTo(5l));
-
-        DateHistogram histo = response.getAggregations().get("date_histo");
-        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
-        assertThat(buckets.size(), equalTo(2));
-
-        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-10");
-        assertThat(bucket, Matchers.notNullValue());
-        assertThat(bucket.getDocCount(), equalTo(2l));
-
-        bucket = histo.getBucketByKey("2014-03-11");
-        assertThat(bucket, Matchers.notNullValue());
-        assertThat(bucket.getDocCount(), equalTo(3l));
-    }
-
-    @Test
-    public void singleValue_WithPostOffset() throws Exception {
-        prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
-        IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
-        DateTime date = date("2014-03-11T00:00:00+00:00");
-        for (int i = 0; i < reqs.length; i++) {
-            reqs[i] = client().prepareIndex("idx2", "type", "" + i).setSource(jsonBuilder().startObject().field("date", date).endObject());
-            date = date.plusHours(6);
-        }
-        indexRandom(true, reqs);
-
-        SearchResponse response = client().prepareSearch("idx2")
-                .setQuery(matchAllQuery())
-                .addAggregation(dateHistogram("date_histo")
-                        .field("date")
-                        .postOffset("2d")
-                        .interval(DateHistogram.Interval.DAY)
-                        .format("yyyy-MM-dd"))
-                .execute().actionGet();
-
-        assertThat(response.getHits().getTotalHits(), equalTo(5l));
-
-        DateHistogram histo = response.getAggregations().get("date_histo");
-        Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
-        assertThat(buckets.size(), equalTo(2));
-
-        DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-13");
-        assertThat(bucket, Matchers.notNullValue());
-        assertThat(bucket.getDocCount(), equalTo(4l));
-
-        bucket = histo.getBucketByKey("2014-03-14");
-        assertThat(bucket, Matchers.notNullValue());
-        assertThat(bucket.getDocCount(), equalTo(1l));
-    }
-
     @Test
     public void singleValue_WithPreZone_WithAadjustLargeInterval() throws Exception {
         prepareCreate("idx2").addMapping("type", "date", "type=date").execute().actionGet();
@@ -19,14 +19,10 @@
 package org.elasticsearch.test;
 
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-import com.carrotsearch.randomizedtesting.annotations.Listeners;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.*;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.AbstractRandomizedTest;
@@ -297,20 +293,84 @@ public abstract class ElasticsearchTestCase extends AbstractRandomizedTest {
        SORTED_VERSIONS = version.build();
    }

    /**
     * @return the {@link Version} before the {@link Version#CURRENT}
     */
    public static Version getPreviousVersion() {
        Version version = SORTED_VERSIONS.get(1);
        assert version.before(Version.CURRENT);
        return version;
    }

    /**
     * A random {@link Version}.
     *
     * @return a random {@link Version} from all available versions
     */
    public static Version randomVersion() {
        return randomVersion(getRandom());
    }

    /**
     * A random {@link Version}.
     *
     * @param random
     *            the {@link Random} to use to generate the random version
     *
     * @return a random {@link Version} from all available versions
     */
    public static Version randomVersion(Random random) {
        return SORTED_VERSIONS.get(random.nextInt(SORTED_VERSIONS.size()));
    }

    /**
     * A random {@link Version} from <code>minVersion</code> to
     * <code>maxVersion</code> (inclusive).
     *
     * @param minVersion
     *            the minimum version (inclusive)
     * @param maxVersion
     *            the maximum version (inclusive)
     * @return a random {@link Version} from <code>minVersion</code> to
     *         <code>maxVersion</code> (inclusive)
     */
    public static Version randomVersionBetween(Version minVersion, Version maxVersion) {
        return randomVersionBetween(getRandom(), minVersion, maxVersion);
    }

    /**
     * A random {@link Version} from <code>minVersion</code> to
     * <code>maxVersion</code> (inclusive).
     *
     * @param random
     *            the {@link Random} to use to generate the random version
     * @param minVersion
     *            the minimum version (inclusive)
     * @param maxVersion
     *            the maximum version (inclusive)
     * @return a random {@link Version} from <code>minVersion</code> to
     *         <code>maxVersion</code> (inclusive)
     */
    public static Version randomVersionBetween(Random random, Version minVersion, Version maxVersion) {
        int minVersionIndex = SORTED_VERSIONS.size();
        if (minVersion != null) {
            minVersionIndex = SORTED_VERSIONS.indexOf(minVersion);
        }
        int maxVersionIndex = 0;
        if (maxVersion != null) {
            maxVersionIndex = SORTED_VERSIONS.indexOf(maxVersion);
        }
        if (minVersionIndex == -1) {
            throw new IllegalArgumentException("minVersion [" + minVersion + "] does not exist.");
        } else if (maxVersionIndex == -1) {
            throw new IllegalArgumentException("maxVersion [" + maxVersion + "] does not exist.");
        } else {
            // minVersionIndex is inclusive so need to add 1 to this index
            int range = minVersionIndex + 1 - maxVersionIndex;
            return SORTED_VERSIONS.get(maxVersionIndex + random.nextInt(range));
        }
    }

    static final class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {

        private final Thread.UncaughtExceptionHandler parent;
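Since SORTED_VERSIONS is ordered newest-first (getPreviousVersion() reading index 1 shows this), the older bound sits at the larger index, which is why the range adds 1 to minVersionIndex. A worked sketch of the index arithmetic, with an invented version list standing in for SORTED_VERSIONS:

import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class VersionRangeSketch {
    public static void main(String[] args) {
        // Invented stand-in for SORTED_VERSIONS, newest first.
        List<String> sorted = Arrays.asList("1.4.0", "1.3.2", "1.3.1", "1.3.0");
        int minVersionIndex = sorted.indexOf("1.3.0"); // 3: older bound, larger index
        int maxVersionIndex = sorted.indexOf("1.4.0"); // 0: newer bound, smaller index
        int range = minVersionIndex + 1 - maxVersionIndex; // 4: both bounds inclusive
        Random random = new Random();
        // Indices 0..3 are all reachable, i.e. every version between the bounds.
        System.out.println(sorted.get(maxVersionIndex + random.nextInt(range)));
    }
}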
@@ -37,24 +37,31 @@ import java.util.Random;
  *
  */
 public class AssertingLocalTransport extends LocalTransport {
 
+    public static final String ASSERTING_TRANSPORT_MIN_VERSION_KEY = "transport.asserting.version.min";
+    public static final String ASSERTING_TRANSPORT_MAX_VERSION_KEY = "transport.asserting.version.max";
+
     private final Random random;
+    private final Version minVersion;
+    private final Version maxVersion;
 
     @Inject
     public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version) {
         super(settings, threadPool, version);
         final long seed = settings.getAsLong(ElasticsearchIntegrationTest.SETTING_INDEX_SEED, 0l);
         random = new Random(seed);
+        minVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_0_18_0);
+        maxVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MAX_VERSION_KEY, Version.CURRENT);
     }
 
     @Override
     protected void handleParsedResponse(final TransportResponse response, final TransportResponseHandler handler) {
-        ElasticsearchAssertions.assertVersionSerializable(ElasticsearchTestCase.randomVersion(random), response);
+        ElasticsearchAssertions.assertVersionSerializable(ElasticsearchTestCase.randomVersionBetween(random, minVersion, maxVersion), response);
         super.handleParsedResponse(response, handler);
     }
 
     @Override
     public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
-        ElasticsearchAssertions.assertVersionSerializable(ElasticsearchTestCase.randomVersion(random), request);
+        ElasticsearchAssertions.assertVersionSerializable(ElasticsearchTestCase.randomVersionBetween(random, minVersion, maxVersion), request);
         super.sendRequest(node, requestId, action, request, options);
     }
 }
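The two settings keys make the asserting transport's version window configurable per test suite. A usage sketch, mirroring the nodeSettings override in DateHistogramOffsetTests above; the max key is included only to illustrate its default of Version.CURRENT:

Settings settings = ImmutableSettings.builder()
        .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_1_4_0)
        // defaults to Version.CURRENT; shown here for illustration only
        .put(AssertingLocalTransport.ASSERTING_TRANSPORT_MAX_VERSION_KEY, Version.CURRENT)
        .build();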