[TEST] Fix HistogramTests

Fixed histogram tests for value scripts, as they were picking the wrong buckets from the bucket list following the removal of the getBucketByKey method
Colin Goodheart-Smithe 2015-01-27 12:10:26 +00:00
parent 7e6e9dbb96
commit 6f894b1d2c


@@ -585,11 +585,12 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
         List<? extends Bucket> buckets = histo.getBuckets();
         assertThat(buckets.size(), equalTo(numBuckets));
-        for (int i = 2 / interval; i <= (numDocs + 1) / interval; ++i) {
+        for (int i = 0; i < numBuckets; i++) {
             Histogram.Bucket bucket = buckets.get(i);
             assertThat(bucket, notNullValue());
-            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
-            assertThat(bucket.getDocCount(), equalTo(counts[i]));
+            int key = ((2 / interval) + i) * interval;
+            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key));
+            assertThat(bucket.getDocCount(), equalTo(counts[key / interval]));
         }
     }
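
To make the mapping in the hunk above easier to follow — list index i to bucket key to a slot in counts — here is a minimal standalone sketch, not part of the commit, with made-up values for interval and numDocs:

// Standalone illustration only; interval, numDocs and counts are made up for this sketch.
public class BucketKeyMappingSketch {
    public static void main(String[] args) {
        int interval = 2;
        int numDocs = 10;                                   // scripted values run from 2 to numDocs + 1
        int firstKey = (2 / interval) * interval;           // key of the first non-empty bucket
        int lastKey = ((numDocs + 1) / interval) * interval;
        int numBuckets = (lastKey - firstKey) / interval + 1;

        // counts is indexed by key / interval, as in the assertions above
        int[] counts = new int[lastKey / interval + 1];
        for (int value = 2; value <= numDocs + 1; value++) {
            counts[value / interval]++;
        }

        // The first bucket in the returned list has key (2 / interval) * interval,
        // so list index i maps to key = ((2 / interval) + i) * interval, not i * interval.
        for (int i = 0; i < numBuckets; i++) {
            int key = ((2 / interval) + i) * interval;
            System.out.println("bucket " + i + " -> key " + key
                    + ", expected doc count " + counts[key / interval]);
        }
    }
}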
@@ -665,11 +666,12 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
         List<? extends Bucket> buckets = histo.getBuckets();
         assertThat(buckets.size(), equalTo(numBuckets));
-        for (int i = 2 / interval; i <= (numDocs + 2) / interval; ++i) {
+        for (int i = 0; i < numBuckets; i++) {
             Histogram.Bucket bucket = buckets.get(i);
             assertThat(bucket, notNullValue());
-            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
-            assertThat(bucket.getDocCount(), equalTo(counts[i]));
+            int key = ((2 / interval) + i) * interval;
+            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key));
+            assertThat(bucket.getDocCount(), equalTo(counts[key / interval]));
         }
     }
@@ -701,16 +703,17 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
         List<? extends Bucket> buckets = histo.getBuckets();
         assertThat(buckets.size(), equalTo(numBuckets));
-        for (int i = 2 / interval; i < (numDocs + 2) / interval; ++i) {
+        for (int i = 0; i < numBuckets; i++) {
             Histogram.Bucket bucket = buckets.get(i);
             assertThat(bucket, notNullValue());
-            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
-            assertThat(bucket.getDocCount(), equalTo(counts[i]));
+            int key = ((2 / interval) + i) * interval;
+            assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) key));
+            assertThat(bucket.getDocCount(), equalTo(counts[key / interval]));
             Terms terms = bucket.getAggregations().get(MULTI_VALUED_FIELD_NAME);
             assertThat(terms, notNullValue());
             assertThat(terms.getName(), equalTo(MULTI_VALUED_FIELD_NAME));
-            int minTerm = Math.max(2, i * interval - 1);
-            int maxTerm = Math.min(numDocs + 2, (i + 1) * interval);
+            int minTerm = Math.max(2, key - 1);
+            int maxTerm = Math.min(numDocs + 2, (key / interval + 1) * interval);
             assertThat(terms.getBuckets().size(), equalTo(maxTerm - minTerm + 1));
             Iterator<Terms.Bucket> iter = terms.getBuckets().iterator();
             for (int j = minTerm; j <= maxTerm; ++j) {