1) Adjust the Timeseries caching fixes to still store the long, but do the timezone adjustment on the way out.

2) Store a reference to the granularity object instead of getting it every time
This commit is contained in:
Eric Tschetter 2013-01-24 18:25:21 -06:00
parent 55ae4c87dd
commit ee7337fbb9
2 changed files with 10 additions and 7 deletions

View File

@@ -29,6 +29,7 @@ import com.metamx.common.guava.ConcatSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.index.v1.IncrementalIndex;
import com.metamx.druid.initialization.Initialization;
@@ -125,11 +126,13 @@ public class GroupByQueryQueryToolChest implements QueryToolChest<Row, GroupByQu
Sequences.simple(index.iterableWithPostAggregations(query.getPostAggregatorSpecs())),
new Function<Row, Row>()
{
private final QueryGranularity granularity = query.getGranularity();
@Override
public Row apply(@Nullable Row input)
public Row apply(Row input)
{
final MapBasedRow row = (MapBasedRow) input;
return new MapBasedRow(query.getGranularity().toDateTime(row.getTimestampFromEpoch()), row.getEvent());
return new MapBasedRow(granularity.toDateTime(row.getTimestampFromEpoch()), row.getEvent());
}
}
);

View File

@@ -28,6 +28,7 @@ import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.ResultGranularTimestampComparator;
import com.metamx.druid.TimeseriesBinaryFn;
import com.metamx.druid.aggregation.AggregatorFactory;
@@ -192,8 +193,7 @@ public class TimeseriesQueryQueryToolChest implements QueryToolChest<Result<Time
TimeseriesResultValue results = input.getValue();
final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size());
// make sure to preserve timezone information when caching results
retVal.add(input.getTimestamp());
retVal.add(input.getTimestamp().getMillis());
for (AggregatorFactory agg : aggs) {
retVal.add(results.getMetric(agg.getName()));
}
@@ -208,6 +208,8 @@ public class TimeseriesQueryQueryToolChest implements QueryToolChest<Result<Time
{
return new Function<Object, Result<TimeseriesResultValue>>()
{
private final QueryGranularity granularity = query.getGranularity();
@Override
public Result<TimeseriesResultValue> apply(@Nullable Object input)
{
@@ -217,9 +219,7 @@ public class TimeseriesQueryQueryToolChest implements QueryToolChest<Result<Time
Iterator<AggregatorFactory> aggsIter = aggs.iterator();
Iterator<Object> resultIter = results.iterator();
DateTime timestamp = ISODateTimeFormat.dateTimeParser()
.withOffsetParsed()
.parseDateTime(resultIter.next().toString());
DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue());
while (aggsIter.hasNext() && resultIter.hasNext()) {
final AggregatorFactory factory = aggsIter.next();