Fix java.lang.ClassCastException error when using useApproximateCountDistinct false for aggregation query (#12216)

* add imply

* add test

* add unit test

* add test
This commit is contained in:
Maytas Monsereenusorn 2022-02-03 12:01:13 -08:00 committed by GitHub
parent fc76b014d1
commit 3717693633
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 94 additions and 1 deletion

View File

@ -188,7 +188,8 @@ public class PartialDruidQuery
relBuilder.push(selectProject.getInput()); relBuilder.push(selectProject.getInput());
relBuilder.project( relBuilder.project(
newProjectRexNodes, newProjectRexNodes,
newSelectProject.getRowType().getFieldNames() newSelectProject.getRowType().getFieldNames(),
true
); );
theProject = (Project) relBuilder.build(); theProject = (Project) relBuilder.build();
} }

View File

@ -8173,6 +8173,98 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
); );
} }
// Regression test for the ClassCastException fixed by this commit: with
// useApproximateCountDistinct=false and useGroupingSetForExactDistinct=true,
// an identity projection on top of a select-project must not rename columns
// when the planner rebuilds the project (hence the new `force` flag passed to
// relBuilder.project(...) in PartialDruidQuery).
@Test
public void testQueryWithSelectProjectAndIdentityProjectDoesNotRename() throws Exception
{
  // Grouping-set-based exact distinct cannot be vectorized, and the nested
  // group-by requires 3 merge buffers.
  cannotVectorize();
  requireMergeBuffers(3);
  testQuery(
      // PLANNER_CONFIG_NO_HLL disables approximate count distinct; the
      // override turns on the grouping-set rewrite for exact DISTINCT.
      PLANNER_CONFIG_NO_HLL.withOverrides(ImmutableMap.of(
          PlannerConfig.CTX_KEY_USE_GROUPING_SET_FOR_EXACT_DISTINCT,
          "true"
      )),
      // Ratio of a filtered SUM to a filtered exact COUNT(DISTINCT dim1),
      // both filtered on the same __time range, over the whole table.
      "SELECT\n"
      + "(SUM(CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN 1 ELSE 0 END)*1.0/COUNT(DISTINCT CASE WHEN (TIMESTAMP '2000-01-04 17:00:00'<=__time AND __time<TIMESTAMP '2022-01-05 17:00:00') THEN dim1 END))\n"
      + "FROM druid.foo\n"
      + "GROUP BY ()",
      CalciteTests.REGULAR_USER_AUTH_RESULT,
      ImmutableList.of(
          // Expected plan: an outer group-by over a nested group-by that
          // uses grouping sets (subtotals) to compute the exact distinct.
          GroupByQuery.builder()
                      .setDataSource(
                          new QueryDataSource(
                              GroupByQuery.builder()
                                          .setDataSource(CalciteTests.DATASOURCE1)
                                          .setInterval(querySegmentSpec(Filtration.eternity()))
                                          .setGranularity(Granularities.ALL)
                                          .setVirtualColumns(
                                              // v0 = dim1 when __time is in range, else null;
                                              // millis correspond to the SQL timestamps above.
                                              expressionVirtualColumn(
                                                  "v0",
                                                  "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),\"dim1\",null)",
                                                  ColumnType.STRING
                                              )
                                          )
                                          .setDimensions(
                                              dimensions(
                                                  new DefaultDimensionSpec(
                                                      "v0",
                                                      "d0",
                                                      ColumnType.STRING
                                                  )
                                              )
                                          )
                                          .setAggregatorSpecs(
                                              aggregators(
                                                  // a0: the filtered SUM, expressed as a LongSum
                                                  // over the same CASE expression.
                                                  new LongSumAggregatorFactory(
                                                      "a0",
                                                      null,
                                                      "case_searched(((947005200000 <= \"__time\") && (\"__time\" < 1641402000000)),1,0)",
                                                      ExprMacroTable.nil()
                                                  ),
                                                  // a1: grouping indicator telling which
                                                  // grouping set each output row belongs to.
                                                  new GroupingAggregatorFactory(
                                                      "a1",
                                                      ImmutableList.of("v0")
                                                  )
                                              )
                                          )
                                          // Two grouping sets: per-d0 rows (for the distinct
                                          // count) and the grand total (for the sum).
                                          .setSubtotalsSpec(
                                              ImmutableList.of(
                                                  ImmutableList.of("d0"),
                                                  ImmutableList.of()
                                              )
                                          )
                                          .setContext(QUERY_CONTEXT_DEFAULT)
                                          .build()
                          )
                      )
                      .setInterval(querySegmentSpec(Filtration.eternity()))
                      .setGranularity(Granularities.ALL)
                      .setAggregatorSpecs(
                          aggregators(
                              // _a0: pick the total sum from the grand-total grouping
                              // set (a1 == 1 marks rows where v0 was rolled up).
                              new FilteredAggregatorFactory(
                                  new LongMinAggregatorFactory("_a0", "a0"),
                                  selector("a1", "1", null)
                              ),
                              // _a1: exact distinct count — count non-null d0 rows
                              // from the per-d0 grouping set (a1 == 0).
                              new FilteredAggregatorFactory(
                                  new CountAggregatorFactory("_a1"),
                                  and(not(selector("d0", null, null)), selector("a1", "0", null))
                              )
                          )
                      )
                      // Final ratio: (sum * 1.0) / distinct count.
                      .setPostAggregatorSpecs(Collections.singletonList(new ExpressionPostAggregator(
                          "p0",
                          "((\"_a0\" * 1.0) / \"_a1\")",
                          null,
                          ExprMacroTable.nil()
                      )))
                      .setContext(QUERY_CONTEXT_DEFAULT)
                      .build()
      ),
      // All 6 rows of druid.foo fall in the time range and dim1 has 6 distinct
      // values (one empty string is excluded as null) — NOTE(review): the
      // expected 1.0 ratio comes from the fixture data; verify against
      // CalciteTests.DATASOURCE1 if the fixture changes.
      ImmutableList.of(
          new Object[]{1.0d}
      )
  );
}
@Test @Test
public void testGroupByFloorWithOrderBy() throws Exception public void testGroupByFloorWithOrderBy() throws Exception
{ {