mirror of https://github.com/apache/druid.git
Merge pull request #108 from metamx/fix-cursor-gran
fix cursor overshooting granularity boundaries for filtered queries
This commit is contained in: commit f00c2d07d3
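Both storage-adapter changes below swap gran.next(timeStart) for gran.next(input) when computing a cursor's end boundary. Judging from the context lines (time = gran.toDateTime(input) in the first hunk), input is the start of the current granularity bucket, while timeStart is that start clamped to the query interval; assuming gran.next(t) adds the granularity period to t, calling it on the clamped value overshoots whenever the interval begins mid-bucket. A minimal Joda-Time sketch of that reading, with period addition standing in for Druid's gran.next(...):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;

public class GranularityOvershootSketch
{
  public static void main(String[] args)
  {
    DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");
    Period gran = new Period("P7D");

    // "input": start of the current 7-day bucket; "timeStart": the bucket
    // start clamped to a query interval that begins mid-bucket.
    DateTime input = new DateTime("2011-01-06T00:00:00.000-08:00", tz);
    DateTime timeStart = new DateTime("2011-01-12T00:00:00.000-08:00", tz);

    System.out.println(input.plus(gran));     // 2011-01-13: the real bucket end
    System.out.println(timeStart.plus(gran)); // 2011-01-19: six days past it
  }
}

With the overshoot, the first cursor would sweep six extra days of rows into the 2011-01-06 bucket; the test added in the last hunk pins down the corrected behavior.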
@@ -164,7 +164,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
                 timeStart, new String[][]{}
             ),
             new IncrementalIndex.TimeAndDims(
-                Math.min(actualInterval.getEndMillis(), gran.next(timeStart)), new String[][]{}
+                Math.min(actualInterval.getEndMillis(), gran.next(input)), new String[][]{}
             )
         );
         time = gran.toDateTime(input);
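Here the two IncrementalIndex.TimeAndDims keys bound a range scan over the incremental index's time-ordered map, so an end key computed one gran.next(...) too far silently pulls the next bucket's rows into the current cursor. A toy JDK-only stand-in (hypothetical long keys; the real index keys rows by TimeAndDims) showing how the end key bounds what the cursor sees:

import java.util.concurrent.ConcurrentSkipListMap;

public class SubMapBoundsSketch
{
  public static void main(String[] args)
  {
    // Rows keyed by timestamp; bucket A is [0, 20), bucket B is [20, 40).
    ConcurrentSkipListMap<Long, String> rows = new ConcurrentSkipListMap<Long, String>();
    rows.put(10L, "bucket A row");
    rows.put(20L, "bucket B row");
    rows.put(30L, "bucket B row");

    // Correct end key: the boundary right after the bucket start.
    System.out.println(rows.subMap(10L, 20L)); // {10=bucket A row}
    // Overshot end key: bucket B's rows leak into bucket A's cursor.
    System.out.println(rows.subMap(10L, 34L)); // {10=..., 20=..., 30=...}
  }
}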
@@ -272,7 +272,7 @@ public class QueryableIndexStorageAdapter extends BaseStorageAdapter
         }

         final Offset offset = new TimestampCheckingOffset(
-            baseOffset, timestamps, Math.min(interval.getEndMillis(), gran.next(timeStart))
+            baseOffset, timestamps, Math.min(interval.getEndMillis(), gran.next(input))
         );

         return new Cursor()
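In the queryable-index path the clamped boundary is handed to TimestampCheckingOffset instead. Its implementation is not part of this diff, so the following is a simplified, hypothetical rendering of what the constructor arguments suggest: an offset that stops yielding rows once the current row's timestamp reaches the cursor's time limit.

public class TimestampCheckingOffsetSketch
{
  private final long[] timestamps; // row timestamps, ascending
  private final long timeLimit;    // Math.min(interval end, gran.next(input))
  private int offset;

  TimestampCheckingOffsetSketch(long[] timestamps, long timeLimit)
  {
    this.timestamps = timestamps;
    this.timeLimit = timeLimit;
  }

  boolean withinBounds()
  {
    // Stop as soon as we run out of rows or cross the cursor's end boundary.
    return offset < timestamps.length && timestamps[offset] < timeLimit;
  }

  void increment()
  {
    offset++;
  }

  public static void main(String[] args)
  {
    long[] ts = {100L, 200L, 300L};
    TimestampCheckingOffsetSketch o = new TimestampCheckingOffsetSketch(ts, 250L);
    while (o.withinBounds()) { // visits 100 and 200, stops before 300
      System.out.println(ts[o.offset]);
      o.increment();
    }
  }
}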
@@ -375,6 +375,53 @@ public class TimeseriesQueryRunnerTest
     TestHelper.assertExpectedResults(expectedResults2, results2);
   }

+  @Test
+  public void testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter()
+  {
+    TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder()
+                                   .dataSource(QueryRunnerTestHelper.dataSource)
+                                   .filters(QueryRunnerTestHelper.providerDimension, "spot", "upfront", "total_market")
+                                   .granularity(new PeriodGranularity(new Period("P7D"), null, DateTimeZone.forID("America/Los_Angeles")))
+                                   .intervals(
+                                       Arrays.asList(
+                                           new Interval(
+                                               "2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00"
+                                           )
+                                       )
+                                   )
+                                   .aggregators(
+                                       Arrays.<AggregatorFactory>asList(
+                                           QueryRunnerTestHelper.rowsCount,
+                                           new LongSumAggregatorFactory(
+                                               "idx",
+                                               "index"
+                                           )
+                                       )
+                                   )
+                                   .build();
+
+    List<Result<TimeseriesResultValue>> expectedResults1 = Arrays.asList(
+        new Result<TimeseriesResultValue>(
+            new DateTime("2011-01-06T00:00:00.000-08:00", DateTimeZone.forID("America/Los_Angeles")),
+            new TimeseriesResultValue(
+                ImmutableMap.<String, Object>of("rows", 13L, "idx", 6071L)
+            )
+        ),
+        new Result<TimeseriesResultValue>(
+            new DateTime("2011-01-13T00:00:00.000-08:00", DateTimeZone.forID("America/Los_Angeles")),
+            new TimeseriesResultValue(
+                ImmutableMap.<String, Object>of("rows", 91L, "idx", 33382L)
+            )
+        )
+    );
+
+    Iterable<Result<TimeseriesResultValue>> results1 = Sequences.toList(
+        runner.run(query1),
+        Lists.<Result<TimeseriesResultValue>>newArrayList()
+    );
+    TestHelper.assertExpectedResults(expectedResults1, results1);
+  }
+
   @Test
   public void testTimeseriesWithVaryingGranWithFilter()
   {
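The expected results follow from clamping each 7-day bucket to the query interval. The test's own expected timestamps show the buckets starting on Thursdays in Los Angeles time (consistent with a null origin effectively anchoring them at the epoch; 1970-01-01 was a Thursday): the interval 2011-01-12/2011-01-20 touches the buckets starting 2011-01-06 and 2011-01-13, with the first cursor clamped to a single day (13 rows) and the second covering a full week (91 = 7 x 13 rows). A small Joda-Time walk over those buckets, mirroring the Math.min(...) clamp in the fixed adapters:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;

public class BucketClampSketch
{
  public static void main(String[] args)
  {
    DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");
    Period week = new Period("P7D");

    DateTime intervalStart = new DateTime("2011-01-12T00:00:00.000-08:00", tz);
    DateTime intervalEnd = new DateTime("2011-01-20T00:00:00.000-08:00", tz);

    // First bucket touching the interval (a Thursday, per the test's expectations).
    DateTime bucket = new DateTime("2011-01-06T00:00:00.000-08:00", tz);
    while (bucket.isBefore(intervalEnd)) {
      DateTime cursorStart = bucket.isAfter(intervalStart) ? bucket : intervalStart;
      DateTime bucketEnd = bucket.plus(week);
      DateTime cursorEnd = bucketEnd.isBefore(intervalEnd) ? bucketEnd : intervalEnd;
      System.out.println(bucket.toLocalDate() + " -> [" + cursorStart.toLocalDate()
                         + ", " + cursorEnd.toLocalDate() + ")");
      bucket = bucketEnd;
    }
    // Prints:
    // 2011-01-06 -> [2011-01-12, 2011-01-13)   one day:   13 rows
    // 2011-01-13 -> [2011-01-13, 2011-01-20)   full week: 91 rows
  }
}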