mirror of https://github.com/apache/druid.git
Merge branch 'master' into task-stuff
commit e25b43b181
@@ -215,7 +215,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
           while (baseIter.hasNext()) {
             currEntry.set(baseIter.next());
             if (filterMatcher.matches()) {
-              break;
+              return;
             }

             numAdvanced++;
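A note on the one-line change above: with break, control still falls through to whatever follows the loop (the numAdvanced bookkeeping and any post-loop logic), whereas return exits the enclosing method as soon as the filter matches. A minimal standalone sketch of that difference, using hypothetical names rather than Druid's actual cursor internals:

import java.util.Iterator;
import java.util.List;

public class BreakVsReturn
{
  // Hypothetical stand-in for the advance loop above: scan until a row
  // matches. With "break", the post-loop code below still runs; with
  // "return" (as in the commit), the method exits at the first match.
  static int advanceUntilMatch(List<String> rows, String target)
  {
    int numAdvanced = 0;
    Iterator<String> baseIter = rows.iterator();
    while (baseIter.hasNext()) {
      if (baseIter.next().equals(target)) {
        return numAdvanced; // a "break" here would fall through to the code below
      }
      numAdvanced++;
    }
    // Post-loop logic that "break" would reach but "return" skips.
    return -1;
  }

  public static void main(String[] args)
  {
    System.out.println(advanceUntilMatch(List.of("a", "b", "c"), "b")); // 1
    System.out.println(advanceUntilMatch(List.of("a"), "z"));           // -1
  }
}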
@@ -22,6 +22,7 @@ package com.metamx.druid.query.group;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -41,6 +42,7 @@ import com.metamx.druid.query.dimension.DefaultDimensionSpec;
 import com.metamx.druid.query.dimension.DimensionSpec;
 import com.metamx.druid.query.segment.MultipleIntervalSegmentSpec;
 import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
 import org.joda.time.Period;
 import org.junit.Test;
@@ -150,6 +152,70 @@ public class GroupByQueryRunnerTest
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }

+  @Test
+  public void testGroupByWithTimeZone() {
+    DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");
+
+    GroupByQuery query = GroupByQuery.builder()
+                                     .setDataSource(QueryRunnerTestHelper.dataSource)
+                                     .setInterval("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00")
+                                     .setDimensions(
+                                         Lists.newArrayList(
+                                             (DimensionSpec) new DefaultDimensionSpec(
+                                                 "quality",
+                                                 "alias"
+                                             )
+                                         )
+                                     )
+                                     .setAggregatorSpecs(
+                                         Arrays.<AggregatorFactory>asList(
+                                             QueryRunnerTestHelper.rowsCount,
+                                             new LongSumAggregatorFactory(
+                                                 "idx",
+                                                 "index"
+                                             )
+                                         )
+                                     )
+                                     .setGranularity(
+                                         new PeriodGranularity(
+                                             new Period("P1D"),
+                                             null,
+                                             tz
+                                         )
+                                     )
+                                     .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "automotive", "rows", 1L, "idx", 135L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "business", "rows", 1L, "idx", 118L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "entertainment", "rows", 1L, "idx", 158L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "health", "rows", 1L, "idx", 120L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "mezzanine", "rows", 3L, "idx", 2870L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "news", "rows", 1L, "idx", 121L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "premium", "rows", 3L, "idx", 2900L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "technology", "rows", 1L, "idx", 78L)),
+        (Row) new MapBasedRow(new DateTime("2011-03-31", tz), ImmutableMap.<String, Object>of("alias", "travel", "rows", 1L, "idx", 119L)),
+
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "automotive", "rows", 1L, "idx", 147L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "business", "rows", 1L, "idx", 112L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "entertainment", "rows", 1L, "idx", 166L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "health", "rows", 1L, "idx", 113L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "mezzanine", "rows", 3L, "idx", 2447L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "news", "rows", 1L, "idx", 114L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "premium", "rows", 3L, "idx", 2505L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "technology", "rows", 1L, "idx", 97L)),
+        (Row) new MapBasedRow(new DateTime("2011-04-01", tz), ImmutableMap.<String, Object>of("alias", "travel", "rows", 1L, "idx", 126L))
+    );
+
+    Iterable<Row> results = Sequences.toList(
+        runner.run(query),
+        Lists.<Row>newArrayList()
+    );
+
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
+
   @Test
   public void testMergeResults() {
     GroupByQuery.Builder builder = GroupByQuery
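On the expected results above, note why the buckets start at 2011-03-31T00:00-07:00: a P1D PeriodGranularity anchored to America/Los_Angeles floors each event timestamp to local midnight, not UTC midnight. A minimal Joda-Time sketch of that bucketing rule (plain Joda-Time, not Druid's PeriodGranularity implementation):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class TimeZoneBucketDemo
{
  public static void main(String[] args)
  {
    DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles");

    // 2011-04-01T02:00Z is still 2011-03-31 in Los Angeles (UTC-7 under DST),
    // so a one-day bucket in that zone floors it to local midnight on the 31st.
    DateTime event = new DateTime("2011-04-01T02:00:00Z");
    DateTime bucket = event.withZone(tz).dayOfMonth().roundFloorCopy();

    System.out.println(bucket); // 2011-03-31T00:00:00.000-07:00
  }
}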
@@ -0,0 +1,105 @@
+package com.metamx.druid.query.timeseries;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.metamx.common.guava.Sequences;
+import com.metamx.druid.Druids;
+import com.metamx.druid.Query;
+import com.metamx.druid.QueryGranularity;
+import com.metamx.druid.aggregation.AggregatorFactory;
+import com.metamx.druid.aggregation.CountAggregatorFactory;
+import com.metamx.druid.index.IncrementalIndexSegment;
+import com.metamx.druid.index.Segment;
+import com.metamx.druid.index.v1.IncrementalIndex;
+import com.metamx.druid.input.MapBasedInputRow;
+import com.metamx.druid.query.FinalizeResultsQueryRunner;
+import com.metamx.druid.query.QueryRunner;
+import com.metamx.druid.query.QueryRunnerFactory;
+import com.metamx.druid.result.Result;
+import com.metamx.druid.result.TimeseriesResultValue;
+import junit.framework.Assert;
+import org.joda.time.DateTime;
+import org.joda.time.Interval;
+import org.junit.Test;
+
+import java.util.List;
+
+public class TimeseriesQueryRunnerBonusTest
+{
+  @Test
+  public void testOneRowAtATime() throws Exception
+  {
+    final IncrementalIndex oneRowIndex = new IncrementalIndex(
+        new DateTime("2012-01-01T00:00:00Z").getMillis(), QueryGranularity.NONE, new AggregatorFactory[]{}
+    );
+
+    List<Result<TimeseriesResultValue>> results;
+
+    oneRowIndex.add(
+        new MapBasedInputRow(
+            new DateTime("2012-01-01T00:00:00Z").getMillis(),
+            ImmutableList.of("dim1"),
+            ImmutableMap.<String, Object>of("dim1", "x")
+        )
+    );
+
+    results = runTimeseriesCount(oneRowIndex);
+
+    Assert.assertEquals("index size", 1, oneRowIndex.size());
+    Assert.assertEquals("result size", 1, results.size());
+    Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
+    Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows"));
+
+    oneRowIndex.add(
+        new MapBasedInputRow(
+            new DateTime("2012-01-01T00:00:00Z").getMillis(),
+            ImmutableList.of("dim1"),
+            ImmutableMap.<String, Object>of("dim1", "y")
+        )
+    );
+
+    results = runTimeseriesCount(oneRowIndex);
+
+    Assert.assertEquals("index size", 2, oneRowIndex.size());
+    Assert.assertEquals("result size", 1, results.size());
+    Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp());
+    Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows"));
+  }
+
+  private static List<Result<TimeseriesResultValue>> runTimeseriesCount(IncrementalIndex index)
+  {
+    final QueryRunnerFactory factory = new TimeseriesQueryRunnerFactory();
+    final QueryRunner<Result<TimeseriesResultValue>> runner = makeQueryRunner(
+        factory,
+        new IncrementalIndexSegment(index)
+    );
+
+    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
+                                  .dataSource("xxx")
+                                  .granularity(QueryGranularity.ALL)
+                                  .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D")))
+                                  .aggregators(
+                                      ImmutableList.<AggregatorFactory>of(
+                                          new CountAggregatorFactory("rows")
+                                      )
+                                  )
+                                  .build();
+
+    return Sequences.toList(
+        runner.run(query),
+        Lists.<Result<TimeseriesResultValue>>newArrayList()
+    );
+  }
+
+  private static <T> QueryRunner<T> makeQueryRunner(
+      QueryRunnerFactory<T, Query<T>> factory,
+      Segment adapter
+  )
+  {
+    return new FinalizeResultsQueryRunner<T>(
+        factory.createRunner(adapter),
+        factory.getToolchest()
+    );
+  }
+}
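A note on makeQueryRunner in the new test: it is a decorator composition, wrapping the factory's base runner in FinalizeResultsQueryRunner so the toolchest can post-process each result. A generic sketch of that wrap-the-runner pattern (hypothetical names, not Druid's actual QueryRunner API):

import java.util.function.Function;
import java.util.stream.Stream;

public class DecoratedRunnerDemo
{
  // Hypothetical stand-ins for QueryRunner and FinalizeResultsQueryRunner:
  // a runner produces results, and the decorator post-processes each one.
  interface Runner<T>
  {
    Stream<T> run(String query);
  }

  static <T> Runner<T> finalizing(Runner<T> base, Function<T, T> finalizer)
  {
    return query -> base.run(query).map(finalizer);
  }

  public static void main(String[] args)
  {
    Runner<Long> base = query -> Stream.of(1L, 2L, 3L);
    Runner<Long> finalized = finalizing(base, n -> n * 10);
    finalized.run("count").forEach(System.out::println); // 10, 20, 30
  }
}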