Re-enable test cases in CalciteWindowQueryTest (#15045)

Most of the test cases in CalciteWindowQueryTest were disabled during the Calcite 1.35 upgrade; the changes arose from the fact that the removal of DRUID_SUM had some unexpected side effects:

SqlStdOperatorTable.SUM became the SUM operator
because of that, SqlToRelConverter started rewriting windowed SUMs into SUM0s (roughly as sketched below)
in my opinion this rewrite provides no real advantage for Druid, since SUM0 is serviced by SUM here anyway
I believe that's not 100% correct when it aggregates only nulls, but that doesn't matter in this case
I propose to reintroduce a local DRUID_SUM as an unchanged SUM; once CALCITE-6020 is fixed, we can drop it.
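
A rough sketch of the rewrite in question (the table and column names are hypothetical, and the exact form Calcite emits may differ slightly):

-- What the query expresses:
SELECT SUM(m1) OVER (PARTITION BY dim2) FROM foo

-- Roughly what SqlToRelConverter produces when the aggregator maps to
-- SqlStdOperatorTable.SUM: the windowed SUM is split into COUNT + $SUM0,
-- so that a window containing only nulls yields NULL rather than 0.
SELECT
  CASE WHEN COUNT(m1) OVER (PARTITION BY dim2) > 0
       THEN $SUM0(m1) OVER (PARTITION BY dim2)
       ELSE NULL
  END
FROM foo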
Zoltan Haindrich 2023-10-03 06:48:44 +02:00 committed by GitHub
parent 261f54dc04
commit f3d1c8b70e
8 changed files with 42 additions and 15 deletions


@@ -19,9 +19,10 @@
package org.apache.druid.sql.calcite.aggregation.builtin;
import org.apache.calcite.linq4j.Nullness;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.fun.SqlSumAggFunction;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
@@ -35,11 +36,17 @@ import javax.annotation.Nullable;
public class SumSqlAggregator extends SimpleSqlAggregator
{
/**
* We use this custom aggregation function instead of builtin SqlStdOperatorTable.SUM
* to avoid transformation to COUNT+SUM0. See CALCITE-6020 for more details.
* It can be handled differently after CALCITE-6020 is addressed.
*/
private static final SqlAggFunction DRUID_SUM = new SqlSumAggFunction(Nullness.castNonNull(null)) {};
@Override
public SqlAggFunction calciteFunction()
{
return SqlStdOperatorTable.SUM;
return DRUID_SUM;
}
@Override


@@ -28,11 +28,15 @@ import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.operator.OperatorFactory;
import org.apache.druid.query.operator.WindowOperatorQuery;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.CalciteWindowQueryTest.WindowQueryTestInputClass.TestType;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -47,6 +51,10 @@ import java.util.Locale;
import java.util.Objects;
import java.util.function.Function;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
/**
* These tests are file-based, look in resources -> calcite/tests/window for the set of test specifications.
*/
@@ -116,15 +124,14 @@ public class CalciteWindowQueryTest extends BaseCalciteQueryTest
}
};
if ("failingTest".equals(input.type)) {
return;
}
assumeThat(input.type, Matchers.not(TestType.failingTest));
if ("operatorValidation".equals(input.type)) {
if (input.type == TestType.operatorValidation) {
testBuilder()
.skipVectorize(true)
.sql(input.sql)
.queryContext(ImmutableMap.of(PlannerContext.CTX_ENABLE_WINDOW_FNS, true))
.queryContext(ImmutableMap.of(PlannerContext.CTX_ENABLE_WINDOW_FNS, true,
QueryContexts.ENABLE_DEBUG, true))
.addCustomVerification(QueryVerification.ofResults(results -> {
if (results.exception != null) {
throw new RE(results.exception, "Failed to execute because of exception.");
@@ -140,7 +147,7 @@ public class CalciteWindowQueryTest extends BaseCalciteQueryTest
// and aggregations=[CountAggregatorFactory{name='w0'}, LongSumAggregatorFactory{fieldName='a0', expression='null', name='w1'}]}}]}
// These 2 tests are marked as failingTests to unblock testing at this moment
final WindowOperatorQuery query = (WindowOperatorQuery) results.recordedQueries.get(0);
final WindowOperatorQuery query = getWindowOperatorQuery(results.recordedQueries);
for (int i = 0; i < input.expectedOperators.size(); ++i) {
final OperatorFactory expectedOperator = input.expectedOperators.get(i);
final OperatorFactory actualOperator = query.getOperators().get(i);
@@ -199,6 +206,14 @@ public class CalciteWindowQueryTest extends BaseCalciteQueryTest
}
}
private WindowOperatorQuery getWindowOperatorQuery(List<Query<?>> queries)
{
assertEquals(1, queries.size());
Object query = queries.get(0);
assertTrue(query instanceof WindowOperatorQuery);
return (WindowOperatorQuery) query;
}
private void maybeDumpActualResults(
Function<Object, String> toStrFn, List<Object[]> results
)
@@ -212,8 +227,13 @@ public class CalciteWindowQueryTest extends BaseCalciteQueryTest
public static class WindowQueryTestInputClass
{
enum TestType
{
failingTest,
operatorValidation
}
@JsonProperty
public String type;
public TestType type;
@JsonProperty
public String sql;


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
sql: |
SELECT


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
sql: |
SELECT


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
sql: |
SELECT


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
sql: |
SELECT


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
sql: |
SELECT


@@ -1,4 +1,4 @@
type: "failingTest"
type: "operatorValidation"
# Like wikipediaSimplePartition, but requires re-sorting the input data because the order of the GROUP BY
# does not match the required order for window partitioning. ("t" and "countryIsoCode" are flipped.)
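# For illustration only: this is not the actual test SQL (which is not shown
# here), and the column names below are assumptions based on the wikipedia
# test data. A query of this shape requires re-sorting because the GROUP BY
# order ("t" first, then "countryIsoCode") does not match the window's
# PARTITION BY countryIsoCode ORDER BY t:
#
#   SELECT
#     FLOOR(__time TO DAY) AS t,
#     countryIsoCode,
#     SUM(delta) AS delta_sum,
#     ROW_NUMBER() OVER (PARTITION BY countryIsoCode ORDER BY FLOOR(__time TO DAY)) AS rn
#   FROM wikipedia
#   GROUP BY FLOOR(__time TO DAY), countryIsoCode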