Merge branch 'master' into guice

cheddar 2013-08-29 13:59:59 -05:00
commit 5ad023cd3f
13 changed files with 52 additions and 21 deletions

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -6,11 +6,6 @@ druid.service=realtime
com.metamx.emitter.logging=true
com.metamx.emitter.logging.level=info
-# zk
-druid.zk.service.host=localhost
-druid.zk.paths.base=/druid
-druid.zk.paths.discoveryPath=/druid/discoveryPath
# processing
druid.processing.buffer.sizeBytes=10000000
@@ -34,4 +29,4 @@ druid.paths.indexCache=/tmp/druid/indexCache
# handoff
druid.pusher.local.storageDirectory=/tmp/druid/localStorage
-druid.pusher.local=true
+druid.pusher.local=true

View File

@@ -9,7 +9,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -23,7 +23,7 @@
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
<packaging>pom</packaging>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
<name>druid</name>
<description>druid</description>
<scm>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -28,7 +28,7 @@
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>

View File

@@ -53,6 +53,7 @@ import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -281,7 +282,7 @@ public class GroupByQueryEngine
private final List<DimensionSpec> dimensionSpecs;
private final List<DimensionSelector> dimensions;
-private final String[] dimNames;
+private final ArrayList<String> dimNames;
private final List<AggregatorFactory> aggregatorSpecs;
private final BufferAggregator[] aggregators;
private final String[] metricNames;
@@ -301,14 +302,14 @@ public class GroupByQueryEngine
delegate = Iterators.emptyIterator();
dimensionSpecs = query.getDimensions();
dimensions = Lists.newArrayListWithExpectedSize(dimensionSpecs.size());
-dimNames = new String[dimensionSpecs.size()];
+dimNames = Lists.newArrayListWithExpectedSize(dimensionSpecs.size());
for (int i = 0; i < dimensionSpecs.size(); ++i) {
final DimensionSpec dimSpec = dimensionSpecs.get(i);
final DimensionSelector selector = cursor.makeDimensionSelector(dimSpec.getDimension());
if (selector != null) {
dimensions.add(selector);
+dimNames.add(dimSpec.getOutputName());
}
-dimNames[i] = dimSpec.getOutputName();
}
aggregatorSpecs = query.getAggregatorSpecs();
@@ -384,7 +385,7 @@ public class GroupByQueryEngine
ByteBuffer keyBuffer = input.getKey().duplicate();
for (int i = 0; i < dimensions.size(); ++i) {
-theEvent.put(dimNames[i], dimensions.get(i).lookupName(keyBuffer.getInt()));
+theEvent.put(dimNames.get(i), dimensions.get(i).lookupName(keyBuffer.getInt()));
}
int position = input.getValue();
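
The GroupByQueryEngine change above replaces the fixed-size String[] dimNames with a list that is populated only when makeDimensionSelector returns a non-null selector, so the name at index i always lines up with the selector at index i even when a queried dimension does not exist in the data. Below is a minimal, self-contained sketch of that pattern in plain Java, not Druid's actual classes; Selector, available, and requested are illustrative names only.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class SkipMissingDimensionsSketch
{
  // Stand-in for a per-dimension value selector; a null selector means the
  // dimension does not exist in the data being scanned.
  interface Selector
  {
    String currentValue();
  }

  public static void main(String[] args)
  {
    // Only "quality" resolves to a selector; "billy" does not, mirroring the
    // dimension exercised by the new testGroupByWithNonexistantDimension below.
    Map<String, Selector> available = new LinkedHashMap<>();
    available.put("quality", () -> "automotive");

    List<String> requested = Arrays.asList("billy", "quality");

    // Parallel lists, both populated only when a selector exists. With the old
    // fixed-size String[] indexed by the loop counter, names[0] would still be
    // "billy" while selectors.get(0) is the "quality" selector, mis-pairing
    // dimension names and values.
    List<Selector> selectors = new ArrayList<>(requested.size());
    List<String> outputNames = new ArrayList<>(requested.size());
    for (String dim : requested) {
      Selector selector = available.get(dim);
      if (selector != null) {
        selectors.add(selector);
        outputNames.add(dim);
      }
    }

    // Row construction touches only the dimensions that actually resolved.
    Map<String, String> event = new LinkedHashMap<>();
    for (int i = 0; i < selectors.size(); i++) {
      event.put(outputNames.get(i), selectors.get(i).currentValue());
    }
    System.out.println(event); // prints {quality=automotive}
  }
}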

View File

@@ -700,6 +700,41 @@ public class GroupByQueryRunnerTest
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit");
}
+@Test
+public void testGroupByWithNonexistantDimension() throws Exception
+{
+GroupByQuery.Builder builder = GroupByQuery
+.builder()
+.setDataSource(QueryRunnerTestHelper.dataSource)
+.setInterval("2011-04-02/2011-04-04")
+.addDimension("billy")
+.addDimension("quality")
+.setAggregatorSpecs(
+Arrays.<AggregatorFactory>asList(
+QueryRunnerTestHelper.rowsCount
+)
+)
+.setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
+final GroupByQuery query = builder.build();
+List<Row> expectedResults = Arrays.asList(
+createExpectedRow("2011-04-01", "quality", "automotive", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "business", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "entertainment", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "health", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "mezzanine", "rows", 6L),
+createExpectedRow("2011-04-01", "quality", "news", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "premium", "rows", 6L),
+createExpectedRow("2011-04-01", "quality", "technology", "rows", 2L),
+createExpectedRow("2011-04-01", "quality", "travel", "rows", 2L)
+);
+TestHelper.assertExpectedObjects(expectedResults, runner.run(query), "normal");
+QueryRunner<Row> mergeRunner = new GroupByQueryQueryToolChest().mergeResults(runner);
+TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit");
+}
private Row createExpectedRow(final String timestamp, Object... vals)
{
return createExpectedRow(new DateTime(timestamp), vals);

View File

@@ -24,11 +24,11 @@
<artifactId>druid-services</artifactId>
<name>druid-services</name>
<description>druid-services</description>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
<parent>
<groupId>com.metamx</groupId>
<artifactId>druid</artifactId>
-<version>0.5.49-SNAPSHOT</version>
+<version>0.5.50-SNAPSHOT</version>
</parent>
<dependencies>