[TEST] adapt to Aggregations being an abstract class with final methods (elastic/x-pack-elasticsearch#1128)
Original commit: elastic/x-pack-elasticsearch@080548e411
Parent: a11e52fea2
Commit: fc316bd947
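Why every hunk looks the same: Aggregations is now an abstract class with final methods, so Mockito can no longer stub calls such as asList() or get(String), and the old mock(Aggregations.class) fixtures silently stop working. The tests instead wrap mocked leaf aggregations in a real Aggregations instance built through an anonymous subclass. A minimal sketch of the pattern, assuming the protected Aggregations(List<? extends Aggregation>) constructor the new code relies on and the Mockito/Hamcrest static imports these tests already use (the "my_terms" name is illustrative only):

    // Mock only the leaf aggregation; its stubbed name is what get(String) resolves against.
    Terms termsAgg = mock(Terms.class);
    when(termsAgg.getName()).thenReturn("my_terms");

    // A real Aggregations instance backed by the list; no stubbing of final methods needed.
    Aggregations aggs = new Aggregations(Collections.singletonList(termsAgg)) {};

    assertThat(aggs.asList().size(), equalTo(1));
    assertThat(aggs.get("my_terms"), sameInstance(termsAgg));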
AggregationDataExtractorTests.java
@@ -166,9 +166,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
 
     public void testExtractionGivenResponseHasEmptyAggs() throws IOException {
         TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
-        Aggregations emptyAggs = mock(Aggregations.class);
-        when(emptyAggs.asList()).thenReturn(Collections.emptyList());
+        Aggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList());
         SearchResponse response = createSearchResponse(emptyAggs);
         extractor.setNextResponse(response);
 
@@ -183,9 +181,8 @@ public class AggregationDataExtractorTests extends ESTestCase {
         TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L);
 
         Terms termsAgg = mock(Terms.class);
-        Aggregations emptyAggs = mock(Aggregations.class);
-        when(emptyAggs.asList()).thenReturn(Collections.singletonList(termsAgg));
-        SearchResponse response = createSearchResponse(emptyAggs);
+        Aggregations aggs = AggregationTestUtils.createAggs(Collections.singletonList(termsAgg));
+        SearchResponse response = createSearchResponse(aggs);
         extractor.setNextResponse(response);
 
         assertThat(extractor.hasNext(), is(true));
@@ -201,13 +198,12 @@ public class AggregationDataExtractorTests extends ESTestCase {
         Histogram histogram2 = mock(Histogram.class);
         when(histogram2.getName()).thenReturn("hist_2");
 
-        Aggregations emptyAggs = mock(Aggregations.class);
-        when(emptyAggs.asList()).thenReturn(Arrays.asList(histogram1, histogram2));
-        SearchResponse response = createSearchResponse(emptyAggs);
+        Aggregations aggs = AggregationTestUtils.createAggs(Arrays.asList(histogram1, histogram2));
+        SearchResponse response = createSearchResponse(aggs);
         extractor.setNextResponse(response);
 
         assertThat(extractor.hasNext(), is(true));
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> extractor.next());
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, extractor::next);
         assertThat(e.getMessage(), containsString("Multiple top level aggregations not supported; found: [hist_1, hist_2]"));
     }
 
@@ -283,8 +279,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
         when(histogram.getName()).thenReturn(histogramName);
         when((List<Histogram.Bucket>)histogram.getBuckets()).thenReturn(histogramBuckets);
 
-        Aggregations searchAggs = mock(Aggregations.class);
-        when(searchAggs.asList()).thenReturn(Collections.singletonList(histogram));
+        Aggregations searchAggs = AggregationTestUtils.createAggs(Collections.singletonList(histogram));
         return createSearchResponse(searchAggs);
     }
 
AggregationTestUtils.java
@@ -14,10 +14,8 @@ import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.metrics.max.Max;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
-import org.joda.time.DateTime;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -38,12 +36,7 @@ public final class AggregationTestUtils {
     }
 
     static Aggregations createAggs(List<Aggregation> aggsList) {
-        Aggregations aggs = mock(Aggregations.class);
-        when(aggs.asList()).thenReturn(aggsList);
-        for (Aggregation agg: aggsList) {
-            when(aggs.get(agg.getName())).thenReturn(agg);
-        }
-        return aggs;
+        return new Aggregations(aggsList) {};
     }
 
     static Histogram.Bucket createHistogramBucket(long timestamp, long docCount) {
@@ -53,13 +46,6 @@ public final class AggregationTestUtils {
         return bucket;
     }
 
-    static Histogram.Bucket createDateHistogramBucket(DateTime timestamp, long docCount) {
-        Histogram.Bucket bucket = mock(Histogram.Bucket.class);
-        when(bucket.getKey()).thenReturn(timestamp);
-        when(bucket.getDocCount()).thenReturn(docCount);
-        return bucket;
-    }
-
     static Max createMax(String name, double value) {
         Max max = mock(Max.class);
         when(max.getName()).thenReturn(name);
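With createAggs reduced to that anonymous subclass, and the unused createDateHistogramBucket helper and joda-time import gone, every extractor test builds its fixture the same way. A representative usage, pieced together from the AggregationDataExtractorTests hunks further up (all names come from those hunks):

    Histogram histogram1 = mock(Histogram.class);
    when(histogram1.getName()).thenReturn("hist_1");
    Histogram histogram2 = mock(Histogram.class);
    when(histogram2.getName()).thenReturn("hist_2");

    // Two top-level aggregations: the extractor is expected to reject this with
    // "Multiple top level aggregations not supported".
    Aggregations aggs = AggregationTestUtils.createAggs(Arrays.asList(histogram1, histogram2));
    extractor.setNextResponse(createSearchResponse(aggs));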
AggregationToJsonProcessorTests.java
@@ -47,8 +47,8 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
 
     public void testProcessGivenNonMaxTimeAgg() throws IOException {
         List<Histogram.Bucket> histogramBuckets = Arrays.asList(
-                createHistogramBucket(1000L, 3, Arrays.asList(createTerms("time"))),
-                createHistogramBucket(2000L, 5, Arrays.asList(createTerms("time")))
+                createHistogramBucket(1000L, 3, Collections.singletonList(createTerms("time"))),
+                createHistogramBucket(2000L, 5, Collections.singletonList(createTerms("time")))
         );
 
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> aggToString("time", histogramBuckets));
@@ -57,8 +57,8 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
 
     public void testProcessGivenHistogramOnly() throws IOException {
         List<Histogram.Bucket> histogramBuckets = Arrays.asList(
-                createHistogramBucket(1000L, 3, Arrays.asList(createMax("timestamp", 1200))),
-                createHistogramBucket(2000L, 5, Arrays.asList(createMax("timestamp", 2800)))
+                createHistogramBucket(1000L, 3, Collections.singletonList(createMax("timestamp", 1200))),
+                createHistogramBucket(2000L, 5, Collections.singletonList(createMax("timestamp", 2800)))
         );
 
         String json = aggToString("timestamp", histogramBuckets);
@@ -69,8 +69,8 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
 
     public void testProcessGivenHistogramOnlyAndNoDocCount() throws IOException {
         List<Histogram.Bucket> histogramBuckets = Arrays.asList(
-                createHistogramBucket(1000L, 3, Arrays.asList(createMax("time", 1000))),
-                createHistogramBucket(2000L, 5, Arrays.asList(createMax("time", 2000)))
+                createHistogramBucket(1000L, 3, Collections.singletonList(createMax("time", 1000))),
+                createHistogramBucket(2000L, 5, Collections.singletonList(createMax("time", 2000)))
         );
 
         String json = aggToString("time", false, histogramBuckets);
@@ -100,7 +100,7 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
                 createHistogramBucket(2000L, 5, Arrays.asList(
                         createMax("time", 2200),
                         createTerms("my_field", new Term("a", 5), new Term("b", 2)))),
-                createHistogramBucket(3000L, 0, Arrays.asList(createMax("time", -1))),
+                createHistogramBucket(3000L, 0, Collections.singletonList(createMax("time", -1))),
                 createHistogramBucket(4000L, 7, Arrays.asList(
                         createMax("time", 4400),
                         createTerms("my_field", new Term("c", 4), new Term("b", 3))))
@@ -126,7 +126,7 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
                 createHistogramBucket(2000L, 5, Arrays.asList(
                         createMax("time", 2000),
                         createTerms("my_field", new Term("a", 5, "my_value", 21.0), new Term("b", 2, "my_value", 22.0)))),
-                createHistogramBucket(3000L, 0, Arrays.asList(createMax("time", 3000))),
+                createHistogramBucket(3000L, 0, Collections.singletonList(createMax("time", 3000))),
                 createHistogramBucket(4000L, 7, Arrays.asList(
                         createMax("time", 4000),
                         createTerms("my_field", new Term("c", 4, "my_value", 41.0), new Term("b", 3, "my_value", 42.0))))
@@ -173,7 +173,7 @@ public class AggregationToJsonProcessorTests extends ESTestCase {
                 createHistogramBucket(2000L, 5, Arrays.asList(
                         createMax("time", 2000),
                         createTerms("my_field", new Term("a", 5, a2NumericAggs), new Term("b", 2, b2NumericAggs)))),
-                createHistogramBucket(3000L, 0, Arrays.asList(createMax("time", 3000))),
+                createHistogramBucket(3000L, 0, Collections.singletonList(createMax("time", 3000))),
                 createHistogramBucket(4000L, 7, Arrays.asList(
                         createMax("time", 4000),
                         createTerms("my_field", new Term("c", 4, c4NumericAggs), new Term("b", 3, b4NumericAggs))))
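The AggregationToJsonProcessorTests hunks above are mechanical cleanups rather than part of the mocking change: each single-element Arrays.asList(x) becomes Collections.singletonList(x). Both produce a fixed-size list; singletonList simply states the single-element intent and skips the varargs array, for example:

    // before: createHistogramBucket(1000L, 3, Arrays.asList(createMax("time", 1000)))
    createHistogramBucket(1000L, 3, Collections.singletonList(createMax("time", 1000)));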
ChunkedDataExtractorTests.java
@@ -16,6 +16,7 @@ import org.elasticsearch.mock.orig.Mockito;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.aggregations.Aggregation;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.metrics.max.Max;
 import org.elasticsearch.search.aggregations.metrics.min.Min;
@@ -28,6 +29,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 
@@ -395,7 +397,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         extractor.setNextResponse(createErrorResponse());
 
         assertThat(extractor.hasNext(), is(true));
-        expectThrows(IOException.class, () -> extractor.next());
+        expectThrows(IOException.class, extractor::next);
     }
 
     public void testDataSummaryRequestHasShardFailures() {
@@ -404,7 +406,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         extractor.setNextResponse(createResponseWithShardFailures());
 
         assertThat(extractor.hasNext(), is(true));
-        expectThrows(IOException.class, () -> extractor.next());
+        expectThrows(IOException.class, extractor::next);
     }
 
     private SearchResponse createSearchResponse(long totalHits, long earliestTime, long latestTime) {
@@ -414,14 +416,17 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         SearchHits searchHits = new SearchHits(hits, totalHits, 1);
         when(searchResponse.getHits()).thenReturn(searchHits);
 
-        Aggregations aggs = mock(Aggregations.class);
+        List<Aggregation> aggs = new ArrayList<>();
         Min min = mock(Min.class);
         when(min.getValue()).thenReturn((double) earliestTime);
-        when(aggs.get("earliest_time")).thenReturn(min);
+        when(min.getName()).thenReturn("earliest_time");
+        aggs.add(min);
         Max max = mock(Max.class);
         when(max.getValue()).thenReturn((double) latestTime);
-        when(aggs.get("latest_time")).thenReturn(max);
-        when(searchResponse.getAggregations()).thenReturn(aggs);
+        when(max.getName()).thenReturn("latest_time");
+        aggs.add(max);
+        Aggregations aggregations = new Aggregations(aggs) {};
+        when(searchResponse.getAggregations()).thenReturn(aggregations);
         return searchResponse;
     }
 
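Read together, the added lines in the hunk above rebuild this test's fixture the same way as the others: each metric mock gets a name, the list is wrapped in a real Aggregations, and lookups such as get("earliest_time") now run through the production implementation instead of a stub. A sketch of the assembled helper body (the unchanged SearchResponse/SearchHits setup is elided):

    List<Aggregation> aggs = new ArrayList<>();

    Min min = mock(Min.class);
    when(min.getValue()).thenReturn((double) earliestTime);
    when(min.getName()).thenReturn("earliest_time");   // the name is what Aggregations.get() resolves
    aggs.add(min);

    Max max = mock(Max.class);
    when(max.getValue()).thenReturn((double) latestTime);
    when(max.getName()).thenReturn("latest_time");
    aggs.add(max);

    when(searchResponse.getAggregations()).thenReturn(new Aggregations(aggs) {});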
@@ -450,9 +455,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         StubSubExtractor() {}
 
         StubSubExtractor(InputStream... streams) {
-            for (InputStream stream : streams) {
-                this.streams.add(stream);
-            }
+            Collections.addAll(this.streams, streams);
         }
 
         @Override