Mirror of https://github.com/apache/druid.git, synced 2025-02-17 07:25:02 +00:00
Fix overzealous timeseries zero-filling.
When the index's maxTime is not aligned with the query granularity, gran.next can cause an extra time bucket to get zero-filled. Truncating first prevents that.
parent 28e3191503
commit da48a713ae
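To see the off-by-one the commit message describes, here is a minimal standalone sketch using plain millisecond arithmetic for an hourly granularity. The class name and the `truncate`/`next` helpers below are simplified stand-ins for Druid's QueryGranularity methods, not the actual implementation:

public class BucketBoundsDemo
{
  private static final long HOUR = 3_600_000L;

  // Stand-in for gran.truncate: floor a timestamp to its hourly bucket start.
  static long truncate(long t)
  {
    return t - Math.floorMod(t, HOUR);
  }

  // Stand-in for gran.next: advance a timestamp by one granularity period.
  static long next(long t)
  {
    return t + HOUR;
  }

  public static void main(String[] args)
  {
    long maxTime = 90L * 60_000L; // 01:30 UTC: not aligned with the hourly granularity

    // Old end bound: next() from the unaligned maxTime reaches 02:30, so the
    // empty 02:00 bucket overlaps the data interval and gets zero-filled.
    System.out.println(next(maxTime));           // 9000000 -> 02:30

    // New end bound: truncate first, then advance; the interval ends at 02:00
    // (exclusive), so the last emitted bucket is the 01:00 one that holds data.
    System.out.println(next(truncate(maxTime))); // 7200000 -> 02:00
  }
}

With the old bound, a timeseries over this index would report a zero-filled row for the 02:00 bucket; with the truncated bound it stops after 01:00. The diff applies this same change in both storage adapters.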
@@ -138,7 +138,10 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
   {
     Interval actualInterval = interval;
 
-    final Interval dataInterval = new Interval(getMinTime().getMillis(), gran.next(getMaxTime().getMillis()));
+    final Interval dataInterval = new Interval(
+        getMinTime().getMillis(),
+        gran.next(gran.truncate(getMaxTime().getMillis()))
+    );
 
     if (!actualInterval.overlaps(dataInterval)) {
       return Sequences.empty();
@@ -133,8 +133,11 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
 
     Interval actualIntervalTmp = interval;
 
-    final Interval dataInterval = new Interval(getMinTime().getMillis(), gran.next(getMaxTime().getMillis()));
+    final Interval dataInterval = new Interval(
+        getMinTime().getMillis(),
+        gran.next(gran.truncate(getMaxTime().getMillis()))
+    );
 
     if (!actualIntervalTmp.overlaps(dataInterval)) {
       return Sequences.empty();
     }
@@ -465,6 +465,53 @@ public class TimeseriesQueryRunnerTest
     TestHelper.assertExpectedResults(expectedResults1, results1);
   }
 
+  @Test
+  public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity()
+  {
+    TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
+                                   .dataSource(QueryRunnerTestHelper.dataSource)
+                                   .filters(QueryRunnerTestHelper.providerDimension, "spot", "upfront", "total_market")
+                                   .granularity(
+                                       new PeriodGranularity(
+                                           new Period("PT1H"),
+                                           new DateTime(60000),
+                                           DateTimeZone.UTC
+                                       )
+                                   )
+                                   .intervals(
+                                       Arrays.asList(
+                                           new Interval(
+                                               "2011-04-15T00:00:00.000Z/2012"
+                                           )
+                                       )
+                                   )
+                                   .aggregators(
+                                       Arrays.<AggregatorFactory>asList(
+                                           QueryRunnerTestHelper.rowsCount,
+                                           new LongSumAggregatorFactory(
+                                               "idx",
+                                               "index"
+                                           )
+                                       )
+                                   )
+                                   .build();
+
+    List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
+        new Result<TimeseriesResultValue>(
+            new DateTime("2011-04-14T23:01Z"),
+            new TimeseriesResultValue(
+                ImmutableMap.<String, Object>of("rows", 13L, "idx", 4717L)
+            )
+        )
+    );
+
+    Iterable<Result<TimeseriesResultValue>> results1 = Sequences.toList(
+        runner.run(query1),
+        Lists.<Result<TimeseriesResultValue>>newArrayList()
+    );
+    TestHelper.assertExpectedResults(expectedResults1, results1);
+  }
+
   @Test
   public void testTimeseriesWithVaryingGranWithFilter()
   {
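The expected timestamp of 2011-04-14T23:01Z in the test above falls out of the origin-shifted granularity: a PT1H PeriodGranularity anchored at new DateTime(60000), one minute past the epoch, puts bucket boundaries at :01 past each hour. A rough sketch of that origin-anchored truncation, assuming Joda-Time on the classpath (this mimics, rather than calls, PeriodGranularity):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class OriginGranularityDemo
{
  private static final long HOUR = 3_600_000L;

  // Floor t to the start of its bucket when hourly buckets are anchored at
  // `origin` instead of the epoch (a simplified model of PeriodGranularity).
  static long truncate(long t, long origin)
  {
    return t - Math.floorMod(t - origin, HOUR);
  }

  public static void main(String[] args)
  {
    long origin = 60_000L; // epoch + 1 minute, as in the test's granularity
    long t = DateTime.parse("2011-04-14T23:30:00Z").getMillis();

    // Prints 2011-04-14T23:01:00.000Z: the bucket starts one minute past the hour.
    System.out.println(new DateTime(truncate(t, origin), DateTimeZone.UTC));
  }
}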
@@ -35,7 +35,6 @@ import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
-import io.druid.segment.column.ColumnConfig;
 import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
 import org.joda.time.DateTime;
@@ -69,7 +68,7 @@ public class TestIndex
   };
   public static final String[] DIMENSIONS = new String[]{"provider", "quALIty", "plAcEmEnT", "pLacementish"};
   public static final String[] METRICS = new String[]{"iNdEx"};
-  private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-04-16T00:00:00.000Z");
+  private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
   private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
       new DoubleSumAggregatorFactory(METRICS[0], METRICS[0]),
       new HyperUniquesAggregatorFactory("quality_uniques", "quality")