mirror of https://github.com/apache/druid.git
Fix NPE for topN over a missing hyperUniques column
This commit is contained in:
parent
6993d84f02
commit
f24a89a22a
|
@ -111,6 +111,12 @@ public class HyperUniquesAggregatorFactory implements AggregatorFactory
|
||||||
@Override
|
@Override
|
||||||
public int compare(HyperLogLogCollector lhs, HyperLogLogCollector rhs)
|
public int compare(HyperLogLogCollector lhs, HyperLogLogCollector rhs)
|
||||||
{
|
{
|
||||||
|
if(lhs == null) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
if(rhs == null) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
return lhs.compareTo(rhs);
|
return lhs.compareTo(rhs);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
@ -37,6 +37,7 @@ import io.druid.query.aggregation.AggregatorFactory;
|
||||||
import io.druid.query.aggregation.MaxAggregatorFactory;
|
import io.druid.query.aggregation.MaxAggregatorFactory;
|
||||||
import io.druid.query.aggregation.MinAggregatorFactory;
|
import io.druid.query.aggregation.MinAggregatorFactory;
|
||||||
import io.druid.query.aggregation.PostAggregator;
|
import io.druid.query.aggregation.PostAggregator;
|
||||||
|
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
|
||||||
import io.druid.query.dimension.ExtractionDimensionSpec;
|
import io.druid.query.dimension.ExtractionDimensionSpec;
|
||||||
import io.druid.query.extraction.RegexDimExtractionFn;
|
import io.druid.query.extraction.RegexDimExtractionFn;
|
||||||
import io.druid.query.filter.AndDimFilter;
|
import io.druid.query.filter.AndDimFilter;
|
||||||
|
@ -303,6 +304,46 @@ public class TopNQueryRunnerTest
|
||||||
TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
|
TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTopNOverMissingUniques()
|
||||||
|
{
|
||||||
|
TopNQuery query = new TopNQueryBuilder()
|
||||||
|
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||||
|
.granularity(QueryRunnerTestHelper.allGran)
|
||||||
|
.dimension(marketDimension)
|
||||||
|
.metric(QueryRunnerTestHelper.uniqueMetric)
|
||||||
|
.threshold(3)
|
||||||
|
.intervals(QueryRunnerTestHelper.fullOnInterval)
|
||||||
|
.aggregators(
|
||||||
|
Arrays.<AggregatorFactory>asList(new HyperUniquesAggregatorFactory("uniques", "missingUniques"))
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
List<Result<TopNResultValue>> expectedResults = Arrays.asList(
|
||||||
|
new Result<TopNResultValue>(
|
||||||
|
new DateTime("2011-01-12T00:00:00.000Z"),
|
||||||
|
new TopNResultValue(
|
||||||
|
Arrays.<Map<String, Object>>asList(
|
||||||
|
ImmutableMap.<String, Object>builder()
|
||||||
|
.put("market", "total_market")
|
||||||
|
.put("uniques", 0)
|
||||||
|
.build(),
|
||||||
|
ImmutableMap.<String, Object>builder()
|
||||||
|
.put("market", "spot")
|
||||||
|
.put("uniques", 0)
|
||||||
|
.build(),
|
||||||
|
ImmutableMap.<String, Object>builder()
|
||||||
|
.put("market", "upfront")
|
||||||
|
.put("uniques", 0)
|
||||||
|
.build()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
HashMap<String, Object> context = new HashMap<String, Object>();
|
||||||
|
TestHelper.assertExpectedResults(expectedResults, runner.run(query, context));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testTopNBySegment()
|
public void testTopNBySegment()
|
||||||
|
|
Loading…
Reference in New Issue