Merge pull request #16048 from nik9000/unraw_4

Remove lots of raw types from aggregations
This commit is contained in:
Nik Everett 2016-01-18 09:57:00 -05:00
commit d1a2bee1e3
25 changed files with 68 additions and 57 deletions

View File

@ -1065,7 +1065,7 @@ public abstract class Engine implements Closeable {
}
}
public static class CommitId implements Writeable {
public static class CommitId implements Writeable<CommitId> {
private final byte[] id;

View File

@ -62,7 +62,8 @@ public class GeoHashGridParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build();
ValuesSourceParser<ValuesSource.GeoPoint> vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context)
.build();
int precision = GeoHashGridParams.DEFAULT_PRECISION;
int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS;
@ -131,6 +132,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize,
Collections.<InternalGeoHashGrid.Bucket> emptyList(), pipelineAggregators, metaData);
return new NonCollectingAggregator(name, aggregationContext, parent, pipelineAggregators, metaData) {
@Override
public InternalAggregation buildEmptyAggregation() {
return aggregation;
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -78,7 +79,7 @@ public class DateHistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context)
ValuesSourceParser<Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context)
.targetValueType(ValueType.DATE)
.formattable(true)
.timezoneAware(true)
@ -190,7 +191,7 @@ public class DateHistogramParser implements Aggregator.Parser {
.timeZone(vsParser.input().timezone())
.offset(offset).build();
ValuesSourceConfig config = vsParser.config();
ValuesSourceConfig<Numeric> config = vsParser.config();
return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds,
new InternalDateHistogram.Factory());

View File

@ -25,6 +25,7 @@ import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.aggregations.support.format.ValueParser;
import org.elasticsearch.search.internal.SearchContext;
@ -46,7 +47,7 @@ public class HistogramParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context)
ValuesSourceParser<Numeric> vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context)
.targetValueType(ValueType.NUMERIC)
.formattable(true)
.build();
@ -127,7 +128,7 @@ public class HistogramParser implements Aggregator.Parser {
Rounding rounding = new Rounding.Interval(interval);
if (offset != 0) {
rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset);
rounding = new Rounding.OffsetRounding(rounding, offset);
}
if (extendedBounds != null) {
@ -136,7 +137,7 @@ public class HistogramParser implements Aggregator.Parser {
}
return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds,
new InternalHistogram.Factory());
new InternalHistogram.Factory<>());
}

View File

@ -81,9 +81,9 @@ public class MissingAggregator extends SingleBucketAggregator {
return new InternalMissing(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
}
public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource> {
public static class Factory extends ValuesSourceAggregatorFactory<ValuesSource> {
public Factory(String name, ValuesSourceConfig valueSourceConfig) {
public Factory(String name, ValuesSourceConfig<ValuesSource> valueSourceConfig) {
super(name, InternalMissing.TYPE.name(), valueSourceConfig);
}

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -39,8 +40,7 @@ public class MissingParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context)
ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context)
.scriptable(false)
.build();

View File

@ -203,7 +203,8 @@ public class SamplerAggregator extends SingleBucketAggregator {
private int maxDocsPerValue;
private String executionHint;
public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, int maxDocsPerValue) {
public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig<ValuesSource> vsConfig,
int maxDocsPerValue) {
super(name, InternalSampler.TYPE.name(), vsConfig);
this.shardSize = shardSize;
this.maxDocsPerValue = maxDocsPerValue;

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -55,10 +56,10 @@ public class SamplerParser implements Aggregator.Parser {
String executionHint = null;
int shardSize = DEFAULT_SHARD_SAMPLE_SIZE;
int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
ValuesSourceParser vsParser = null;
boolean diversityChoiceMade = false;
vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true).formattable(false).build();
ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true)
.formattable(false).build();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -88,7 +89,7 @@ public class SamplerParser implements Aggregator.Parser {
}
}
ValuesSourceConfig vsConfig = vsParser.config();
ValuesSourceConfig<ValuesSource> vsConfig = vsParser.config();
if (vsConfig.valid()) {
return new SamplerAggregator.DiversifiedFactory(aggregationName, shardSize, executionHint, vsConfig, maxDocsPerValue);
} else {

View File

@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@ -80,8 +79,6 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource;
IndexSearcher indexSearcher = aggregationContext.searchContext().searcher();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
return new GlobalOrdinalsSignificantTermsAggregator(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext,
@ -98,9 +95,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter,
aggregationContext,
parent, termsAggregatorFactory, pipelineAggregators, metaData);
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent,
termsAggregatorFactory, pipelineAggregators, metaData);
}
};
@ -143,7 +139,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
return new TermsAggregator.BucketCountThresholds(bucketCountThresholds);
}
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude,
String executionHint, Query filter, SignificanceHeuristic significanceHeuristic) {
super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig);

View File

@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -53,7 +54,7 @@ public class SignificantTermsParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
SignificantTermsParametersParser aggParser = new SignificantTermsParametersParser(significanceHeuristicParserMapper);
ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context)
ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context)
.scriptable(false)
.formattable(true)
.build();

View File

@ -36,13 +36,13 @@ public abstract class AbstractTermsParametersParser {
public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count");
public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size");
public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error");
//These are the results of the parsing.
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds();
private String executionHint = null;
private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST;
@ -59,12 +59,12 @@ public abstract class AbstractTermsParametersParser {
public IncludeExclude getIncludeExclude() {
return includeExclude;
}
public SubAggCollectionMode getCollectionMode() {
return collectMode;
}
public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException {
public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser<?> vsParser, IncludeExclude.Parser incExcParser) throws IOException {
bucketCountThresholds = getDefaultBucketCountThresholds();
XContentParser.Token token;
String currentFieldName = null;

View File

@ -165,7 +165,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
private final boolean showTermDocCountError;
public TermsAggregatorFactory(String name, ValuesSourceConfig config, Terms.Order order,
public TermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, Terms.Order order,
TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint,
SubAggCollectionMode executionMode, boolean showTermDocCountError) {
super(name, StringTerms.TYPE.name(), config);

View File

@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.TermsParametersParser.OrderElement;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -45,7 +46,8 @@ public class TermsParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
TermsParametersParser aggParser = new TermsParametersParser();
ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true).build();
ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true)
.formattable(true).build();
IncludeExclude.Parser incExcParser = new IncludeExclude.Parser();
aggParser.parse(aggregationName, parser, context, vsParser, incExcParser);

View File

@ -62,6 +62,7 @@ public abstract class ValuesSourceMetricsAggregationBuilder<B extends ValuesSour
/**
* Configure the value to use when documents miss a value.
*/
@SuppressWarnings("unchecked")
public B missing(Object missingValue) {
this.missing = missingValue;
return (B) this;

View File

@ -35,7 +35,7 @@ final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory.L
private final long precisionThreshold;
CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold) {
CardinalityAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config, long precisionThreshold) {
super(name, InternalCardinality.TYPE.name(), config);
this.precisionThreshold = precisionThreshold;
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;
@ -43,7 +44,7 @@ public class CardinalityParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser<?> vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();
ValuesSourceParser<ValuesSource> vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();
long precisionThreshold = -1;

View File

@ -40,7 +40,7 @@ public class ValueCountParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException {
ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context)
ValuesSourceParser<?> vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context)
.build();
XContentParser.Token token;
@ -54,6 +54,6 @@ public class ValueCountParser implements Aggregator.Parser {
}
}
return new ValueCountAggregator.Factory(aggregationName, vsParser.config());
return new ValueCountAggregator.Factory<>(aggregationName, vsParser.config());
}
}

View File

@ -53,6 +53,9 @@ import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import java.io.IOException;
/**
* How to load values for an aggregation.
*/
public abstract class ValuesSource {
/**
@ -528,6 +531,7 @@ public abstract class ValuesSource {
return indexFieldData.load(context).getBytesValues();
}
@Override
public org.elasticsearch.index.fielddata.MultiGeoPointValues geoPointValues(LeafReaderContext context) {
return indexFieldData.load(context).getGeoPointValues();
}

View File

@ -78,19 +78,20 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource> ext
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException;
@SuppressWarnings("unchecked") // Safe because we check the types with isAssignableFrom
private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class<VS> requiredValuesSourceType) {
ValuesSourceConfig config;
ValuesSourceConfig<?> config;
while (parent != null) {
if (parent instanceof ValuesSourceAggregatorFactory) {
config = ((ValuesSourceAggregatorFactory) parent).config;
config = ((ValuesSourceAggregatorFactory<?>) parent).config;
if (config != null && config.valid()) {
if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) {
ValueFormat format = config.format;
this.config = config;
this.config = (ValuesSourceConfig<VS>) config;
// if the user explicitly defined a format pattern, we'll do our best to keep it even when we inherit the
// value source form one of the ancestor aggregations
if (this.config.formatPattern != null && format != null && format instanceof ValueFormat.Patternable) {
this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern);
this.config.format = ((ValueFormat.Patternable<?>) format).create(this.config.formatPattern);
}
return;
}

View File

@ -48,13 +48,16 @@ import java.util.HashMap;
import java.util.Map;
/**
*
* Parses a description of where to load the value sent by a user into a
* ValuesSourceConfig which can be used to work with the values in various ways,
* one of which is to create an actual ValueSource (done with the help of
* AggregationContext).
*/
public class ValuesSourceParser<VS extends ValuesSource> {
static final ParseField TIME_ZONE = new ParseField("time_zone");
public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) {
public static Builder<ValuesSource> any(String aggName, InternalAggregation.Type aggType, SearchContext context) {
return new Builder<>(aggName, aggType, context, ValuesSource.class);
}

View File

@ -36,7 +36,7 @@ import java.util.Locale;
* Collectors used in the search. Children CollectorResult's may be
* embedded inside of a parent CollectorResult
*/
public class CollectorResult implements ToXContent, Writeable {
public class CollectorResult implements ToXContent, Writeable<CollectorResult> {
public static final String REASON_SEARCH_COUNT = "search_count";
public static final String REASON_SEARCH_TOP_HITS = "search_top_hits";
@ -125,7 +125,7 @@ public class CollectorResult implements ToXContent, Writeable {
builder = builder.startObject()
.field(NAME.getPreferredName(), getName())
.field(REASON.getPreferredName(), getReason())
.field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0)));
.field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", getTime() / 1000000.0));
if (!children.isEmpty()) {
builder = builder.startArray(CHILDREN.getPreferredName());
@ -150,7 +150,7 @@ public class CollectorResult implements ToXContent, Writeable {
}
@Override
public Object readFrom(StreamInput in) throws IOException {
public CollectorResult readFrom(StreamInput in) throws IOException {
return new CollectorResult(in);
}
}

View File

@ -109,7 +109,7 @@ public class PipelineAggregationHelperTests extends ESTestCase {
* @param values Array of values to compute metric for
* @param metric A metric builder which defines what kind of metric should be returned for the values
*/
public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder metric) {
public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder<?> metric) {
if (metric instanceof MinBuilder) {
double accumulator = Double.POSITIVE_INFINITY;

View File

@ -77,7 +77,7 @@ public class MovAvgIT extends ESIntegTestCase {
static int period;
static HoltWintersModel.SeasonalityType seasonalityType;
static BucketHelpers.GapPolicy gapPolicy;
static ValuesSourceMetricsAggregationBuilder metric;
static ValuesSourceMetricsAggregationBuilder<?> metric;
static List<PipelineAggregationHelperTests.MockBucket> mockHisto;
static Map<String, ArrayList<Double>> testValues;
@ -864,7 +864,7 @@ public class MovAvgIT extends ESIntegTestCase {
public void testHoltWintersNotEnoughData() {
try {
SearchResponse response = client()
client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@ -1003,7 +1003,7 @@ public class MovAvgIT extends ESIntegTestCase {
public void testBadModelParams() {
try {
SearchResponse response = client()
client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@ -1248,7 +1248,7 @@ public class MovAvgIT extends ESIntegTestCase {
for (MovAvgModelBuilder builder : builders) {
try {
SearchResponse response = client()
client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
@ -1265,14 +1265,10 @@ public class MovAvgIT extends ESIntegTestCase {
// All good
}
}
}
private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) {
private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) {
if (!expectedBucketIter.hasNext()) {
fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch");
}
@ -1355,7 +1351,7 @@ public class MovAvgIT extends ESIntegTestCase {
}
}
private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) {
private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) {
int rand = randomIntBetween(0,3);
switch (rand) {

View File

@ -60,7 +60,7 @@ public class SerialDiffIT extends ESIntegTestCase {
static int numBuckets;
static int lag;
static BucketHelpers.GapPolicy gapPolicy;
static ValuesSourceMetricsAggregationBuilder metric;
static ValuesSourceMetricsAggregationBuilder<?> metric;
static List<PipelineAggregationHelperTests.MockBucket> mockHisto;
static Map<String, ArrayList<Double>> testValues;
@ -80,7 +80,7 @@ public class SerialDiffIT extends ESIntegTestCase {
}
}
private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) {
private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) {
int rand = randomIntBetween(0,3);
switch (rand) {
@ -95,7 +95,7 @@ public class SerialDiffIT extends ESIntegTestCase {
}
}
private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) {
private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) {
if (!expectedBucketIter.hasNext()) {
fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch");
}

View File

@ -79,7 +79,7 @@ public class ResourceWatcherServiceTests extends ESTestCase {
};
// checking default freq
WatcherHandle handle = service.add(watcher);
WatcherHandle<?> handle = service.add(watcher);
assertThat(handle, notNullValue());
assertThat(handle.frequency(), equalTo(ResourceWatcherService.Frequency.MEDIUM));
assertThat(service.lowMonitor.watchers.size(), is(0));