add some failing tests for groupBy

fjy 2014-06-12 11:36:53 -07:00
parent 22e368e79b
commit 403962fe0b
4 changed files with 120 additions and 11 deletions


@@ -150,7 +150,7 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
           @Override
           public AggregatorFactory apply(String input)
           {
-            return new JavaScriptAggregatorFactory(input, Arrays.asList(input), fnAggregate, fnReset, fnCombine);
+            return new JavaScriptAggregatorFactory(input, fieldNames, fnAggregate, fnReset, fnCombine);
           }
         }
     );


@@ -156,7 +156,7 @@ public class CardinalityAggregatorFactory implements AggregatorFactory
           @Override
           public AggregatorFactory apply(String input)
           {
-            return new CardinalityAggregatorFactory(input, Arrays.asList(input), byRow);
+            return new CardinalityAggregatorFactory(input, fieldNames, byRow);
           }
         }
     );
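
Both hunks above are the same fix: the Function<String, AggregatorFactory> that expands an aggregator into per-field factories was rebuilding each factory with Arrays.asList(input), i.e. only the single name handed to apply(), so any other input fields of the original aggregator were silently dropped; it now passes the enclosing fieldNames list through unchanged. A minimal sketch of the difference, using hypothetical stand-in types rather than the real Druid classes:

    import com.google.common.base.Function;
    import com.google.common.collect.Lists;

    import java.util.Arrays;
    import java.util.List;

    public class PerFieldFactorySketch
    {
      // Hypothetical stand-in for an aggregator factory that reads several columns.
      static class FakeFactory
      {
        final String name;
        final List<String> fieldNames;

        FakeFactory(String name, List<String> fieldNames)
        {
          this.name = name;
          this.fieldNames = fieldNames;
        }
      }

      public static void main(String[] args)
      {
        final List<String> fieldNames = Arrays.asList("index", "provider");

        // Old behaviour: each per-field factory is rebuilt with only the single
        // name passed to apply(), so the second input column disappears.
        List<FakeFactory> dropsFields = Lists.transform(
            fieldNames,
            new Function<String, FakeFactory>()
            {
              @Override
              public FakeFactory apply(String input)
              {
                return new FakeFactory(input, Arrays.asList(input));
              }
            }
        );

        // Fixed behaviour: the full fieldNames list is carried into every factory.
        List<FakeFactory> keepsFields = Lists.transform(
            fieldNames,
            new Function<String, FakeFactory>()
            {
              @Override
              public FakeFactory apply(String input)
              {
                return new FakeFactory(input, fieldNames);
              }
            }
        );

        System.out.println(dropsFields.get(0).fieldNames); // [index]
        System.out.println(keepsFields.get(0).fieldNames); // [index, provider]
      }
    }

The two-field JavaScript aggregator over "index" and "provider" in the new test below is presumably the case this fix is meant to cover.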


@@ -237,7 +237,7 @@ public class IncrementalIndex implements Iterable<Row>
           return new ObjectColumnSelector<Float>()
           {
             @Override
-            public Class classOfObject()
+            public Class<Float> classOfObject()
             {
               return Float.TYPE;
             }
@@ -250,12 +250,11 @@ public class IncrementalIndex implements Iterable<Row>
           };
         }

-        final List<String> dimensionValues = in.getDimension(columnName);
-        if (dimensionValues != null) {
+        if (getDimension(columnName) != null) {
           return new ObjectColumnSelector<Object>()
           {
             @Override
-            public Class classOfObject()
+            public Class<Object> classOfObject()
             {
               return Object.class;
             }
@@ -263,14 +262,12 @@ public class IncrementalIndex implements Iterable<Row>
             @Override
             public Object get()
             {
-              final String[] dimVals = dimensionValues.toArray(new String[]{});
+              final String[] dimVals = in.getDimension(columnName).toArray(new String[]{});
              if (dimVals.length == 1) {
                 return dimVals[0];
-              }
-              else if (dimVals.length == 0) {
+              } else if (dimVals.length == 0) {
                 return null;
-              }
-              else {
+              } else {
                 return dimVals;
               }
             }
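
The IncrementalIndex change is twofold: classOfObject() gains its type parameter, and the Object selector stops caching in.getDimension(columnName) in a local when it is constructed, re-reading it on every get() instead (the guard likewise now asks getDimension(columnName) rather than testing the cached list). The practical effect is that the selector reflects whatever row the in field currently refers to instead of staying pinned to the row seen at construction time. A stripped-down sketch of capture-once versus read-per-call, with hypothetical types that are not Druid's API:

    import java.util.Arrays;
    import java.util.List;

    public class SelectorFreshnessSketch
    {
      // Hypothetical stand-in for the mutable "current row" a selector reads from.
      static class CurrentRow
      {
        private List<String> dims;

        void set(List<String> dims)
        {
          this.dims = dims;
        }

        List<String> getDimension(String column)
        {
          return dims;
        }
      }

      interface ObjectSelector
      {
        Object get();
      }

      public static void main(String[] args)
      {
        final CurrentRow in = new CurrentRow();
        final String column = "quality";

        in.set(Arrays.asList("automotive"));

        // Old behaviour: the dimension values are captured when the selector is
        // created and never refreshed, so later rows are ignored.
        final List<String> captured = in.getDimension(column);
        ObjectSelector stale = new ObjectSelector()
        {
          @Override
          public Object get()
          {
            return captured;
          }
        };

        // Fixed behaviour: every get() goes back to the current row.
        ObjectSelector live = new ObjectSelector()
        {
          @Override
          public Object get()
          {
            return in.getDimension(column);
          }
        };

        in.set(Arrays.asList("business"));

        System.out.println(stale.get()); // [automotive] -- pinned to the first row
        System.out.println(live.get());  // [business]   -- tracks the current row
      }
    }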


@@ -42,6 +42,7 @@ import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.QueryToolChest;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
+import io.druid.query.aggregation.JavaScriptAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
 import io.druid.query.aggregation.MaxAggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -1206,6 +1207,117 @@ public class GroupByQueryRunnerTest
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }
+
+  @Test
+  public void testSubqueryWithEverything()
+  {
+    final GroupByQuery subquery = GroupByQuery
+        .builder()
+        .setDataSource(QueryRunnerTestHelper.dataSource)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("quality", "alias")))
+        .setDimFilter(new JavaScriptDimFilter("provider", "function(dim){ return true; }"))
+        .setAggregatorSpecs(
+            Arrays.asList(
+                QueryRunnerTestHelper.rowsCount,
+                new LongSumAggregatorFactory("idx_subagg", "index"),
+                new JavaScriptAggregatorFactory(
+                    "js_agg",
+                    Arrays.asList("index", "provider"),
+                    "function(index, dim){return index + dim.length;}",
+                    "function(){return 0;}",
+                    "function(a,b){return a + b;}"
+                )
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx_subpostagg",
+                    "+",
+                    Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_subagg", "idx_subagg"),
+                        new ConstantPostAggregator("thousand", 1000, 1000)
+                    )
+                )
+            )
+        )
+        .setHavingSpec(
+            new HavingSpec()
+            {
+              @Override
+              public boolean eval(Row row)
+              {
+                return (row.getFloatMetric("idx_subpostagg") < 3800);
+              }
+            }
+        )
+        .addOrderByColumn("alias")
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    final GroupByQuery query = GroupByQuery
+        .builder()
+        .setDataSource(subquery)
+        .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
+        .setDimensions(Lists.<DimensionSpec>newArrayList(new DefaultDimensionSpec("alias", "alias")))
+        .setAggregatorSpecs(
+            Arrays.<AggregatorFactory>asList(
+                new LongSumAggregatorFactory("rows", "rows"),
+                new LongSumAggregatorFactory("idx", "idx_subpostagg"),
+                new DoubleSumAggregatorFactory("js_outer_agg", "js_agg")
+            )
+        )
+        .setPostAggregatorSpecs(
+            Arrays.<PostAggregator>asList(
+                new ArithmeticPostAggregator(
+                    "idx", "+", Arrays.asList(
+                        new FieldAccessPostAggregator("the_idx_agg", "idx"),
+                        new ConstantPostAggregator("ten_thousand", 10000, 10000)
+                    )
+                )
+            )
+        )
+        .setLimitSpec(
+            new DefaultLimitSpec(
+                Arrays.asList(
+                    new OrderByColumnSpec(
+                        "alias",
+                        OrderByColumnSpec.Direction.DESCENDING
+                    )
+                ),
+                5
+            )
+        )
+        .setGranularity(QueryRunnerTestHelper.dayGran)
+        .build();
+
+    List<Row> expectedResults = Arrays.asList(
+        createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 11135.0),
+        createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 11118.0),
+        createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 11158.0),
+        createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 11120.0),
+        createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 11121.0),
+        createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 11078.0),
+        createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 11119.0),
+        createExpectedRow("2011-04-02", "alias", "automotive", "rows", 1L, "idx", 11147.0),
+        createExpectedRow("2011-04-02", "alias", "business", "rows", 1L, "idx", 11112.0),
+        createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 11166.0),
+        createExpectedRow("2011-04-02", "alias", "health", "rows", 1L, "idx", 11113.0),
+        createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 13447.0),
+        createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 11114.0),
+        createExpectedRow("2011-04-02", "alias", "premium", "rows", 3L, "idx", 13505.0),
+        createExpectedRow("2011-04-02", "alias", "technology", "rows", 1L, "idx", 11097.0),
+        createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 11126.0)
+    );
+
+    // Subqueries are handled by the ToolChest
+    Iterable<Row> results = runQuery(query);
+    TestHelper.assertExpectedObjects(expectedResults, results, "");
+  }
+
   private Iterable<Row> runQuery(GroupByQuery query)
   {