SQL: Improve translation of time floor expressions. (#5107)

* SQL: Improve translation of time floor expressions.

The main change is to TimeFloorOperatorConversion.applyTimestampFloor.

- Prefer timestamp_floor expressions to timeFormat extractionFns, to
  avoid turning timestamps into strings when it isn't necessary.
- Collapse CAST(FLOOR(X TO Y) AS DATE) to FLOOR(X TO Y) if appropriate.

* Fix tests.
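
For a concrete sense of the first point, compare the plan shapes in the CalciteQueryTest diffs below. A GROUP BY over FLOOR(__time TO MONTH) used to plan as an extraction dimension backed by a TimeFormatExtractionFn, which formats timestamps as strings; it now plans as a long-typed virtual column over the native timestamp_floor expression. A minimal before/after sketch, shapes copied from the test changes (EXPRESSION_VIRTUAL_COLUMN is a test helper):

// Before: dimension backed by a string-producing extractionFn.
new ExtractionDimensionSpec(
    "__time",
    "d0",
    ValueType.LONG,
    new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)

// After: a virtual column that floors the long timestamp directly.
EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG)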
Gian Merlino 2017-11-29 12:06:03 -08:00 committed by Fangjin Yang
parent 55d894e104
commit 5f6bdd940b
10 changed files with 347 additions and 160 deletions

View File

@@ -35,6 +35,7 @@ import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import javax.annotation.Nullable;
import java.lang.reflect.Modifier;
import java.util.List;
import java.util.Map;
@@ -147,6 +148,16 @@ public class Parser
return Lists.newArrayList(found);
}
@Nullable
public static String getIdentifierIfIdentifier(Expr expr)
{
if (expr instanceof IdentifierExpr) {
return expr.toString();
} else {
return null;
}
}
public static Expr.ObjectBinding withMap(final Map<String, ?> bindings)
{
return bindings::get;
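
A sketch of how the new helper is meant to be used (hypothetical caller, assuming an ExprMacroTable in scope; the real call site is Expressions.toQueryGranularity below): it returns the identifier name only when the parsed expression is a bare column reference, and null otherwise.

// Hypothetical usage: detect a bare reference to the __time column.
final Expr parsed = Parser.parse("__time", macroTable);
if ("__time".equals(Parser.getIdentifierIfIdentifier(parsed))) {
  // parsed is a plain identifier expression, not a function call or literal
}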

View File

@@ -28,6 +28,8 @@ import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import org.joda.time.chrono.ISOChronology;
import javax.annotation.Nullable;
public class ExprUtils
{
private static final Expr.ObjectBinding NIL_BINDINGS = name -> null;
@@ -49,8 +51,8 @@ public class ExprUtils
public static PeriodGranularity toPeriodGranularity(
final Expr periodArg,
final Expr originArg,
final Expr timeZoneArg,
@Nullable final Expr originArg,
@Nullable final Expr timeZoneArg,
final Expr.ObjectBinding bindings
)
{

View File

@@ -21,7 +21,6 @@ package io.druid.query.expression;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.Expr;
import io.druid.math.expr.ExprEval;
@@ -52,7 +51,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
}
}
private static PeriodGranularity getGranularity(final List<Expr> args, final Expr.ObjectBinding bindings)
private static PeriodGranularity computeGranularity(final List<Expr> args, final Expr.ObjectBinding bindings)
{
return ExprUtils.toPeriodGranularity(
args.get(1),
@@ -62,15 +61,31 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
);
}
private static class TimestampFloorExpr implements Expr
public static class TimestampFloorExpr implements Expr
{
private final Expr arg;
private final Granularity granularity;
private final PeriodGranularity granularity;
public TimestampFloorExpr(final List<Expr> args)
{
this.arg = args.get(0);
this.granularity = getGranularity(args, ExprUtils.nilBindings());
this.granularity = computeGranularity(args, ExprUtils.nilBindings());
}
/**
* Exposed for Druid SQL: this is used by Expressions.toQueryGranularity.
*/
public Expr getArg()
{
return arg;
}
/**
* Exposed for Druid SQL: this is used by Expressions.toQueryGranularity.
*/
public PeriodGranularity getGranularity()
{
return granularity;
}
@Nonnull
@@ -88,7 +103,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
}
}
private static class TimestampFloorDynamicExpr implements Expr
public static class TimestampFloorDynamicExpr implements Expr
{
private final List<Expr> args;
@@ -101,7 +116,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
@Override
public ExprEval eval(final ObjectBinding bindings)
{
final PeriodGranularity granularity = getGranularity(args, bindings);
final PeriodGranularity granularity = computeGranularity(args, bindings);
return ExprEval.of(granularity.bucketStart(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis());
}
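
The getters made public above let the SQL planner take a parsed floor apart again. A rough round-trip sketch (assuming a macro table that registers this macro):

// Parse a native expression string back into its structured floor form.
final Expr expr = Parser.parse("timestamp_floor(\"__time\",'P1M','','UTC')", macroTable);
if (expr instanceof TimestampFloorExprMacro.TimestampFloorExpr) {
  final TimestampFloorExprMacro.TimestampFloorExpr floorExpr =
      (TimestampFloorExprMacro.TimestampFloorExpr) expr;
  floorExpr.getArg();          // identifier expr for "__time"
  floorExpr.getGranularity();  // PeriodGranularity for P1M in UTC
}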

View File

@@ -25,7 +25,11 @@ import com.google.common.collect.Lists;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.math.expr.Expr;
import io.druid.math.expr.ExprMacroTable;
import io.druid.math.expr.ExprType;
import io.druid.math.expr.Parser;
import io.druid.query.expression.TimestampFloorExprMacro;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.query.filter.AndDimFilter;
@@ -334,14 +338,55 @@ public class Expressions
flip = true;
}
// Flip operator, maybe.
final SqlKind flippedKind;
if (flip) {
switch (kind) {
case EQUALS:
case NOT_EQUALS:
flippedKind = kind;
break;
case GREATER_THAN:
flippedKind = SqlKind.LESS_THAN;
break;
case GREATER_THAN_OR_EQUAL:
flippedKind = SqlKind.LESS_THAN_OR_EQUAL;
break;
case LESS_THAN:
flippedKind = SqlKind.GREATER_THAN;
break;
case LESS_THAN_OR_EQUAL:
flippedKind = SqlKind.GREATER_THAN_OR_EQUAL;
break;
default:
throw new ISE("WTF?! Kind[%s] not expected here", kind);
}
} else {
flippedKind = kind;
}
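// Example of why flipping is needed: Calcite may present a predicate as
// TIMESTAMP '2000-01-01' < FLOOR(__time TO DAY). After swapping operands so the
// column expression is on the left, LESS_THAN must become GREATER_THAN for the
// comparison to keep its meaning.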
// rhs must be a literal
if (rhs.getKind() != SqlKind.LITERAL) {
return null;
}
// lhs must be translatable to a SimpleExtraction to be simple-filterable
// Translate lhs to a DruidExpression.
final DruidExpression lhsExpression = toDruidExpression(plannerContext, rowSignature, lhs);
if (lhsExpression == null || !lhsExpression.isSimpleExtraction()) {
if (lhsExpression == null) {
return null;
}
// Special handling for filters on FLOOR(__time TO granularity).
final Granularity queryGranularity = toQueryGranularity(lhsExpression, plannerContext.getExprMacroTable());
if (queryGranularity != null) {
// lhs is FLOOR(__time TO granularity); rhs must be a timestamp
final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis();
return buildTimeFloorFilter(Column.TIME_COLUMN_NAME, queryGranularity, flippedKind, rhsMillis);
}
// In the general case, lhs must be translatable to a SimpleExtraction to be simple-filterable.
if (!lhsExpression.isSimpleExtraction()) {
return null;
}
@@ -364,27 +409,28 @@
// Create a BoundRefKey that strips the extractionFn and compares __time as a number.
final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);
if (kind == SqlKind.EQUALS) {
switch (flippedKind) {
case EQUALS:
return rhsAligned
? Bounds.interval(boundRefKey, rhsInterval)
: Filtration.matchNothing();
} else if (kind == SqlKind.NOT_EQUALS) {
case NOT_EQUALS:
return rhsAligned
? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval))
: Filtration.matchEverything();
} else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
case GREATER_THAN:
return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
} else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
case GREATER_THAN_OR_EQUAL:
return rhsAligned
? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
: Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
} else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
case LESS_THAN:
return rhsAligned
? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
: Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
} else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
case LESS_THAN_OR_EQUAL:
return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
} else {
default:
throw new IllegalStateException("WTF?! Shouldn't have got here...");
}
}
@@ -414,19 +460,26 @@
final DimFilter filter;
// Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator).
if (kind == SqlKind.EQUALS) {
switch (flippedKind) {
case EQUALS:
filter = Bounds.equalTo(boundRefKey, val);
} else if (kind == SqlKind.NOT_EQUALS) {
break;
case NOT_EQUALS:
filter = new NotDimFilter(Bounds.equalTo(boundRefKey, val));
} else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
break;
case GREATER_THAN:
filter = Bounds.greaterThan(boundRefKey, val);
} else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
break;
case GREATER_THAN_OR_EQUAL:
filter = Bounds.greaterThanOrEqualTo(boundRefKey, val);
} else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
break;
case LESS_THAN:
filter = Bounds.lessThan(boundRefKey, val);
} else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
break;
case LESS_THAN_OR_EQUAL:
filter = Bounds.lessThanOrEqualTo(boundRefKey, val);
} else {
break;
default:
throw new IllegalStateException("WTF?! Shouldn't have got here...");
}
@@ -482,4 +535,86 @@
? null
: new ExpressionDimFilter(druidExpression.getExpression(), plannerContext.getExprMacroTable());
}
/**
* Converts an expression to a Granularity, if possible. This is possible if, and only if, the expression
* is a timestamp_floor function on the __time column with literal parameters for period, origin, and timeZone.
*
* @return granularity or null if not possible
*/
@Nullable
public static Granularity toQueryGranularity(final DruidExpression expression, final ExprMacroTable macroTable)
{
final TimestampFloorExprMacro.TimestampFloorExpr expr = asTimestampFloorExpr(expression, macroTable);
if (expr == null) {
return null;
}
final Expr arg = expr.getArg();
final Granularity granularity = expr.getGranularity();
if (Column.TIME_COLUMN_NAME.equals(Parser.getIdentifierIfIdentifier(arg))) {
return granularity;
} else {
return null;
}
}
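// Illustrative inputs and results (not part of the patch):
//   timestamp_floor("__time",'P1M','','UTC') -> PeriodGranularity of P1M in UTC
//   timestamp_floor("cnt",'P1Y','','UTC')    -> null (argument is not __time)
//   "__time" + 1                             -> null (not a timestamp_floor call)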
@Nullable
public static TimestampFloorExprMacro.TimestampFloorExpr asTimestampFloorExpr(
final DruidExpression expression,
final ExprMacroTable macroTable
)
{
final Expr expr = Parser.parse(expression.getExpression(), macroTable);
if (expr instanceof TimestampFloorExprMacro.TimestampFloorExpr) {
return (TimestampFloorExprMacro.TimestampFloorExpr) expr;
} else {
return null;
}
}
/**
* Build a filter for an expression like FLOOR(column TO granularity) [operator] rhsMillis
*/
private static DimFilter buildTimeFloorFilter(
final String column,
final Granularity granularity,
final SqlKind operatorKind,
final long rhsMillis
)
{
final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);
final Interval rhsInterval = granularity.bucket(DateTimes.utc(rhsMillis));
// Is rhs aligned on granularity boundaries?
final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis;
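// For example, with P1D granularity: a rhs of 2000-01-02T00:00:00Z lands on a
// bucket boundary (aligned), while 2000-01-02T03:00:00Z does not (unaligned).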
switch (operatorKind) {
case EQUALS:
return rhsAligned
? Bounds.interval(boundRefKey, rhsInterval)
: Filtration.matchNothing();
case NOT_EQUALS:
return rhsAligned
? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval))
: Filtration.matchEverything();
case GREATER_THAN:
return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
case GREATER_THAN_OR_EQUAL:
return rhsAligned
? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
: Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
case LESS_THAN:
return rhsAligned
? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
: Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
case LESS_THAN_OR_EQUAL:
return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
default:
throw new IllegalStateException("WTF?! Shouldn't have got here...");
}
}
}
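
A worked example of the bound construction above, with values chosen for illustration: for FLOOR(__time TO DAY) > TIMESTAMP '2000-01-02 03:00:00', the rhs bucket is [2000-01-02T00:00Z, 2000-01-03T00:00Z) and rhsAligned is false. The floor of every in-bucket timestamp equals the bucket start, which is not greater than the rhs, so the floor first exceeds the rhs at the next bucket; the GREATER_THAN case therefore returns:

// Illustrative result for the example above (not part of the patch):
Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()))
// i.e. __time >= 2000-01-03T00:00:00Z, compared numerically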

View File

@@ -126,7 +126,8 @@ public class CastOperatorConversion implements SqlOperatorConversion
// Floor to day when casting to DATE.
return TimeFloorOperatorConversion.applyTimestampFloor(
typeCastExpression,
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone())
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()),
plannerContext.getExprMacroTable()
);
} else {
return typeCastExpression;
@@ -153,7 +154,8 @@
if (toType == SqlTypeName.DATE) {
return TimeFloorOperatorConversion.applyTimestampFloor(
timestampExpression,
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone())
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()),
plannerContext.getExprMacroTable()
);
} else if (toType == SqlTypeName.TIMESTAMP) {
return timestampExpression;

View File

@@ -73,7 +73,11 @@ public class FloorOperatorConversion implements SqlOperatorConversion
return null;
}
return TimeFloorOperatorConversion.applyTimestampFloor(druidExpression, granularity);
return TimeFloorOperatorConversion.applyTimestampFloor(
druidExpression,
granularity,
plannerContext.getExprMacroTable()
);
} else {
// WTF? FLOOR with 3 arguments?
return null;

View File

@@ -22,9 +22,10 @@ package io.druid.sql.calcite.expression.builtin;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.ExprMacroTable;
import io.druid.query.expression.TimestampFloorExprMacro;
import io.druid.sql.calcite.expression.DruidExpression;
import io.druid.sql.calcite.expression.Expressions;
import io.druid.sql.calcite.expression.ExtractionFns;
import io.druid.sql.calcite.expression.OperatorConversions;
import io.druid.sql.calcite.expression.SqlOperatorConversion;
import io.druid.sql.calcite.planner.Calcites;
@@ -44,6 +45,7 @@ import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class TimeFloorOperatorConversion implements SqlOperatorConversion
@@ -58,28 +60,52 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
public static DruidExpression applyTimestampFloor(
final DruidExpression input,
final PeriodGranularity granularity
final PeriodGranularity granularity,
final ExprMacroTable macroTable
)
{
Preconditions.checkNotNull(input, "input");
Preconditions.checkNotNull(granularity, "granularity");
return input.map(
simpleExtraction -> simpleExtraction.cascade(ExtractionFns.fromQueryGranularity(granularity)),
expression -> DruidExpression.functionCall(
// Collapse floor chains if possible. Useful for constructs like CAST(FLOOR(__time TO QUARTER) AS DATE).
if (granularity.getPeriod().equals(Period.days(1))) {
final TimestampFloorExprMacro.TimestampFloorExpr floorExpr = Expressions.asTimestampFloorExpr(
input,
macroTable
);
if (floorExpr != null) {
final PeriodGranularity inputGranularity = floorExpr.getGranularity();
if (Objects.equals(inputGranularity.getTimeZone(), granularity.getTimeZone())
&& Objects.equals(inputGranularity.getOrigin(), granularity.getOrigin())
&& periodIsDayMultiple(inputGranularity.getPeriod())) {
return input;
}
}
}
return DruidExpression.fromFunctionCall(
"timestamp_floor",
ImmutableList.of(
expression,
input.getExpression(),
DruidExpression.stringLiteral(granularity.getPeriod().toString()),
DruidExpression.numberLiteral(
granularity.getOrigin() == null ? null : granularity.getOrigin().getMillis()
),
DruidExpression.stringLiteral(granularity.getTimeZone().toString())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
)
);
}
private static boolean periodIsDayMultiple(final Period period)
{
return period.getMillis() == 0
&& period.getSeconds() == 0
&& period.getMinutes() == 0
&& period.getHours() == 0
&& (period.getDays() > 0 || period.getWeeks() > 0 || period.getMonths() > 0 || period.getYears() > 0);
}
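// The collapse in action, as exercised by testTimeseriesUsingFloorPlusCastAsDate
// below: CAST(FLOOR(__time TO QUARTER) AS DATE) applies a P1D floor on top of a
// P3M floor. Every quarter boundary is already a day boundary, so with matching
// time zones and origins the outer floor is a no-op:
//   input = timestamp_floor("__time",'P3M','','UTC')   (the inner quarter floor)
//   granularity.getPeriod() = P1D, inputGranularity.getPeriod() = P3M
//   periodIsDayMultiple(P3M) is true (months > 0, no sub-day fields) => return input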
@Override
public SqlOperator calciteOperator()
{
@@ -117,7 +143,7 @@
? DateTimeZone.forID(RexLiteral.stringValue(operands.get(3)))
: plannerContext.getTimeZone();
final PeriodGranularity granularity = new PeriodGranularity(period, origin, timeZone);
return applyTimestampFloor(druidExpressions.get(0), granularity);
return applyTimestampFloor(druidExpressions.get(0), granularity, plannerContext.getExprMacroTable());
} else {
// Granularity is dynamic
return DruidExpression.fromFunctionCall("timestamp_floor", druidExpressions);
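
The literal-versus-dynamic split at the end of this method, illustrated with hypothetical SQL inputs: a literal period can be reduced to a PeriodGranularity at planning time and goes through applyTimestampFloor, where it may collapse with an inner floor or later be recognized as a query granularity; a non-literal period cannot, so it stays a plain timestamp_floor function call evaluated per row.

// Hypothetical inputs:
//   TIME_FLOOR(__time, 'PT1H')      -> literal period: applyTimestampFloor path
//   TIME_FLOOR(__time, period_col)  -> dynamic period: fromFunctionCall fallback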

View File

@@ -64,7 +64,6 @@ import io.druid.sql.calcite.aggregation.Aggregation;
import io.druid.sql.calcite.aggregation.DimensionExpression;
import io.druid.sql.calcite.expression.DruidExpression;
import io.druid.sql.calcite.expression.Expressions;
import io.druid.sql.calcite.expression.ExtractionFns;
import io.druid.sql.calcite.filtration.Filtration;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.PlannerContext;
@@ -537,14 +536,16 @@ public class DruidQuery
return toExprType.equals(fromExprType);
}
public VirtualColumns getVirtualColumns(final ExprMacroTable macroTable)
public VirtualColumns getVirtualColumns(final ExprMacroTable macroTable, final boolean includeDimensions)
{
final List<VirtualColumn> retVal = new ArrayList<>();
if (grouping != null) {
if (includeDimensions) {
for (DimensionExpression dimensionExpression : grouping.getDimensions()) {
retVal.addAll(dimensionExpression.getVirtualColumns(macroTable));
}
}
for (Aggregation aggregation : grouping.getAggregations()) {
retVal.addAll(aggregation.getVirtualColumns());
@@ -653,14 +654,15 @@
queryGranularity = Granularities.ALL;
descending = false;
} else if (grouping.getDimensions().size() == 1) {
final DimensionSpec dimensionSpec = Iterables.getOnlyElement(grouping.getDimensions()).toDimensionSpec();
final Granularity gran = ExtractionFns.toQueryGranularity(dimensionSpec.getExtractionFn());
final DimensionExpression dimensionExpression = Iterables.getOnlyElement(grouping.getDimensions());
queryGranularity = Expressions.toQueryGranularity(
dimensionExpression.getDruidExpression(),
plannerContext.getExprMacroTable()
);
if (gran == null || !dimensionSpec.getDimension().equals(Column.TIME_COLUMN_NAME)) {
if (queryGranularity == null) {
// Timeseries only applies if the single dimension is granular __time.
return null;
} else {
queryGranularity = gran;
}
if (limitSpec != null) {
@@ -677,7 +679,7 @@
// wouldn't matter anyway).
final OrderByColumnSpec firstOrderBy = limitSpec.getColumns().get(0);
if (firstOrderBy.getDimension().equals(dimensionSpec.getOutputName())) {
if (firstOrderBy.getDimension().equals(dimensionExpression.getOutputName())) {
// Order by time.
descending = firstOrderBy.getDirection() == OrderByColumnSpec.Direction.DESCENDING;
} else {
@@ -703,7 +705,7 @@
dataSource,
filtration.getQuerySegmentSpec(),
descending,
getVirtualColumns(plannerContext.getExprMacroTable()),
getVirtualColumns(plannerContext.getExprMacroTable(), false),
filtration.getDimFilter(),
queryGranularity,
grouping.getAggregatorFactories(),
@@ -768,7 +770,7 @@
return new TopNQuery(
dataSource,
getVirtualColumns(plannerContext.getExprMacroTable()),
getVirtualColumns(plannerContext.getExprMacroTable(), true),
dimensionSpec,
topNMetricSpec,
limitSpec.getLimit(),
@@ -798,7 +800,7 @@
return new GroupByQuery(
dataSource,
filtration.getQuerySegmentSpec(),
getVirtualColumns(plannerContext.getExprMacroTable()),
getVirtualColumns(plannerContext.getExprMacroTable(), true),
filtration.getDimFilter(),
Granularities.ALL,
grouping.getDimensionSpecs(),
@@ -927,7 +929,7 @@
Granularities.ALL,
ImmutableList.of(new DefaultDimensionSpec(dummyColumn, dummyColumn)),
metrics.stream().sorted().distinct().collect(Collectors.toList()),
getVirtualColumns(plannerContext.getExprMacroTable()),
getVirtualColumns(plannerContext.getExprMacroTable(), true),
pagingSpec,
ImmutableSortedMap.copyOf(plannerContext.getQueryContext())
);
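
Why getVirtualColumns gained the includeDimensions flag: in a Timeseries query the time floor is carried by the query's granularity (see the queryGranularity logic above), so materializing the dimension's virtual column would be redundant. TopN, GroupBy, and Select still group or page on columns like "d0:v", so they keep it. The call sites in this diff:

// Timeseries: the floor became queryGranularity, so skip dimension virtual columns.
getVirtualColumns(plannerContext.getExprMacroTable(), false)
// TopN / GroupBy / Select: dimensions reference the virtual column, so include it.
getVirtualColumns(plannerContext.getExprMacroTable(), true)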

View File

@@ -59,7 +59,6 @@ import io.druid.query.extraction.CascadeExtractionFn;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.RegexDimExtractionFn;
import io.druid.query.extraction.SubstringDimExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.query.filter.AndDimFilter;
import io.druid.query.filter.BoundDimFilter;
import io.druid.query.filter.DimFilter;
@@ -3312,6 +3311,9 @@ public class CalciteQueryTest
.setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y','','UTC')", ValueType.LONG)
)
.setDimFilter(
BOUND(
"cnt",
@@ -3323,14 +3325,7 @@
StringComparators.NUMERIC
)
)
.setDimensions(DIMS(
new ExtractionDimensionSpec(
"cnt",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
)
))
.setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG)))
.setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0")))
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
@@ -3827,13 +3822,15 @@
)
.setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"_d0:v",
"timestamp_floor(\"a0\",'PT1H','','UTC')",
ValueType.LONG
)
)
.setDimensions(DIMS(
new ExtractionDimensionSpec(
"a0",
"_d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.HOUR, true)
),
new DefaultDimensionSpec("_d0:v", "_d0", ValueType.LONG),
new DefaultDimensionSpec("d0", "_d1", ValueType.STRING)
))
.setAggregatorSpecs(AGGS(
@@ -5078,14 +5075,16 @@
.setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"d0:v",
"timestamp_floor(\"__time\",'P1Y','','UTC')",
ValueType.LONG
)
)
.setDimensions(
DIMS(
new ExtractionDimensionSpec(
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
),
new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG),
new DefaultDimensionSpec("dim2", "d1")
)
)
@@ -5652,6 +5651,32 @@
);
}
@Test
public void testTimeseriesUsingFloorPlusCastAsDate() throws Exception
{
testQuery(
"SELECT SUM(cnt), dt FROM (\n"
+ " SELECT CAST(FLOOR(__time TO QUARTER) AS DATE) AS dt,\n"
+ " cnt FROM druid.foo\n"
+ ") AS x\n"
+ "GROUP BY dt\n"
+ "ORDER BY dt",
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity()))
.granularity(new PeriodGranularity(Period.months(3), null, DateTimeZone.UTC))
.aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.context(TIMESERIES_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{3L, D("2000-01-01")},
new Object[]{3L, D("2001-01-01")}
)
);
}
@Test
public void testTimeseriesDescending() throws Exception
{
@@ -5858,16 +5883,14 @@
.setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setDimensions(
DIMS(
new ExtractionDimensionSpec(
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"d0:v",
"timestamp_floor(\"__time\",'P1M','','UTC')",
ValueType.LONG
)
)
.setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG)))
.setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.setLimitSpec(
new DefaultLimitSpec(
@@ -5906,14 +5929,10 @@
.dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity()))
.granularity(Granularities.ALL)
.dimension(
new ExtractionDimensionSpec(
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
.virtualColumns(
EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
)
.dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
.aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
.threshold(1)
@@ -5943,14 +5962,10 @@
.dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity()))
.granularity(Granularities.ALL)
.dimension(
new ExtractionDimensionSpec(
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
.virtualColumns(
EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
)
.dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
.aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
.threshold(1)
@@ -5976,15 +5991,17 @@
.setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"d1:v",
"timestamp_floor(\"__time\",'P1M','','UTC')",
ValueType.LONG
)
)
.setDimensions(
DIMS(
new DefaultDimensionSpec("dim2", "d0"),
new ExtractionDimensionSpec(
"__time",
"d1",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
new DefaultDimensionSpec("d1:v", "d1", ValueType.LONG)
)
)
.setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))

View File

@@ -22,12 +22,9 @@ package io.druid.sql.calcite.expression;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.ExprEval;
import io.druid.math.expr.Parser;
import io.druid.query.extraction.RegexDimExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.segment.column.ValueType;
import io.druid.server.security.AuthTestUtils;
import io.druid.sql.calcite.expression.builtin.DateTruncOperatorConversion;
@@ -403,19 +400,7 @@
rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.TIMESTAMP)),
rexBuilder.makeLiteral("America/Los_Angeles")
),
DruidExpression.of(
SimpleExtraction.of(
"t",
new TimeFormatExtractionFn(
null,
null,
null,
new PeriodGranularity(Period.days(1), null, LOS_ANGELES),
true
)
),
"timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"
),
DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"),
DateTimes.of("2000-02-02T08:00:00").getMillis()
);
}
@@ -431,13 +416,7 @@
inputRef("t"),
rexBuilder.makeFlag(TimeUnitRange.YEAR)
),
DruidExpression.of(
SimpleExtraction.of(
"t",
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
),
"timestamp_floor(\"t\",'P1Y','','UTC')"
),
DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y','','UTC')"),
DateTimes.of("2000").getMillis()
);
}
@@ -735,10 +714,7 @@
typeFactory.createSqlType(SqlTypeName.DATE),
inputRef("t")
),
DruidExpression.of(
SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
"timestamp_floor(\"t\",'P1D','','UTC')"
),
DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
DateTimes.of("2000-02-03").getMillis()
);
@@ -779,10 +755,7 @@
inputRef("t")
)
),
DruidExpression.of(
SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
"timestamp_floor(\"t\",'P1D','','UTC')"
),
DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
DateTimes.of("2000-02-03").getMillis()
);
}