SQL: Improve translation of time floor expressions. (#5107)

* SQL: Improve translation of time floor expressions.

The main change is to TimeFloorOperatorConversion.applyTimestampFloor.

- Prefer timestamp_floor expressions to timeFormat extractionFns, to
  avoid turning things into strings when it isn't necessary.
- Collapse CAST(FLOOR(X TO Y) AS DATE) to FLOOR(X TO Y) if appropriate.

* Fix tests.
This commit is contained in:
Gian Merlino 2017-11-29 12:06:03 -08:00 committed by Fangjin Yang
parent 55d894e104
commit 5f6bdd940b
10 changed files with 347 additions and 160 deletions

View File

@ -35,6 +35,7 @@ import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker; import org.antlr.v4.runtime.tree.ParseTreeWalker;
import javax.annotation.Nullable;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -147,6 +148,16 @@ public class Parser
return Lists.newArrayList(found); return Lists.newArrayList(found);
} }
/**
 * Returns the identifier name if {@code expr} is an {@link IdentifierExpr}, otherwise null.
 *
 * @param expr expression to inspect
 *
 * @return the identifier's string form, or null when the expression is not a plain identifier
 */
@Nullable
public static String getIdentifierIfIdentifier(Expr expr)
{
  return expr instanceof IdentifierExpr ? expr.toString() : null;
}
public static Expr.ObjectBinding withMap(final Map<String, ?> bindings) public static Expr.ObjectBinding withMap(final Map<String, ?> bindings)
{ {
return bindings::get; return bindings::get;

View File

@ -28,6 +28,8 @@ import org.joda.time.DateTimeZone;
import org.joda.time.Period; import org.joda.time.Period;
import org.joda.time.chrono.ISOChronology; import org.joda.time.chrono.ISOChronology;
import javax.annotation.Nullable;
public class ExprUtils public class ExprUtils
{ {
private static final Expr.ObjectBinding NIL_BINDINGS = name -> null; private static final Expr.ObjectBinding NIL_BINDINGS = name -> null;
@ -49,8 +51,8 @@ public class ExprUtils
public static PeriodGranularity toPeriodGranularity( public static PeriodGranularity toPeriodGranularity(
final Expr periodArg, final Expr periodArg,
final Expr originArg, @Nullable final Expr originArg,
final Expr timeZoneArg, @Nullable final Expr timeZoneArg,
final Expr.ObjectBinding bindings final Expr.ObjectBinding bindings
) )
{ {

View File

@ -21,7 +21,6 @@ package io.druid.query.expression;
import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.IAE; import io.druid.java.util.common.IAE;
import io.druid.java.util.common.granularity.Granularity;
import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.Expr; import io.druid.math.expr.Expr;
import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprEval;
@ -52,7 +51,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
} }
} }
private static PeriodGranularity getGranularity(final List<Expr> args, final Expr.ObjectBinding bindings) private static PeriodGranularity computeGranularity(final List<Expr> args, final Expr.ObjectBinding bindings)
{ {
return ExprUtils.toPeriodGranularity( return ExprUtils.toPeriodGranularity(
args.get(1), args.get(1),
@ -62,15 +61,31 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
); );
} }
private static class TimestampFloorExpr implements Expr public static class TimestampFloorExpr implements Expr
{ {
private final Expr arg; private final Expr arg;
private final Granularity granularity; private final PeriodGranularity granularity;
public TimestampFloorExpr(final List<Expr> args) public TimestampFloorExpr(final List<Expr> args)
{ {
this.arg = args.get(0); this.arg = args.get(0);
this.granularity = getGranularity(args, ExprUtils.nilBindings()); this.granularity = computeGranularity(args, ExprUtils.nilBindings());
}
/**
* Exposed for Druid SQL: this is used by Expressions.toQueryGranularity.
*/
public Expr getArg()
{
return arg;
}
/**
* Exposed for Druid SQL: this is used by Expressions.toQueryGranularity.
*/
public PeriodGranularity getGranularity()
{
return granularity;
} }
@Nonnull @Nonnull
@ -88,7 +103,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
} }
} }
private static class TimestampFloorDynamicExpr implements Expr public static class TimestampFloorDynamicExpr implements Expr
{ {
private final List<Expr> args; private final List<Expr> args;
@ -101,7 +116,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
@Override @Override
public ExprEval eval(final ObjectBinding bindings) public ExprEval eval(final ObjectBinding bindings)
{ {
final PeriodGranularity granularity = getGranularity(args, bindings); final PeriodGranularity granularity = computeGranularity(args, bindings);
return ExprEval.of(granularity.bucketStart(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis()); return ExprEval.of(granularity.bucketStart(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis());
} }

View File

@ -25,7 +25,11 @@ import com.google.common.collect.Lists;
import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.ISE; import io.druid.java.util.common.ISE;
import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.Granularity;
import io.druid.math.expr.Expr;
import io.druid.math.expr.ExprMacroTable;
import io.druid.math.expr.ExprType; import io.druid.math.expr.ExprType;
import io.druid.math.expr.Parser;
import io.druid.query.expression.TimestampFloorExprMacro;
import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn; import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.AndDimFilter;
@ -334,14 +338,55 @@ public class Expressions
flip = true; flip = true;
} }
// Flip operator, maybe.
final SqlKind flippedKind;
if (flip) {
switch (kind) {
case EQUALS:
case NOT_EQUALS:
flippedKind = kind;
break;
case GREATER_THAN:
flippedKind = SqlKind.LESS_THAN;
break;
case GREATER_THAN_OR_EQUAL:
flippedKind = SqlKind.LESS_THAN_OR_EQUAL;
break;
case LESS_THAN:
flippedKind = SqlKind.GREATER_THAN;
break;
case LESS_THAN_OR_EQUAL:
flippedKind = SqlKind.GREATER_THAN_OR_EQUAL;
break;
default:
throw new ISE("WTF?! Kind[%s] not expected here", kind);
}
} else {
flippedKind = kind;
}
// rhs must be a literal // rhs must be a literal
if (rhs.getKind() != SqlKind.LITERAL) { if (rhs.getKind() != SqlKind.LITERAL) {
return null; return null;
} }
// lhs must be translatable to a SimpleExtraction to be simple-filterable // Translate lhs to a DruidExpression.
final DruidExpression lhsExpression = toDruidExpression(plannerContext, rowSignature, lhs); final DruidExpression lhsExpression = toDruidExpression(plannerContext, rowSignature, lhs);
if (lhsExpression == null || !lhsExpression.isSimpleExtraction()) { if (lhsExpression == null) {
return null;
}
// Special handling for filters on FLOOR(__time TO granularity).
final Granularity queryGranularity = toQueryGranularity(lhsExpression, plannerContext.getExprMacroTable());
if (queryGranularity != null) {
// lhs is FLOOR(__time TO granularity); rhs must be a timestamp
final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis();
return buildTimeFloorFilter(Column.TIME_COLUMN_NAME, queryGranularity, flippedKind, rhsMillis);
}
// In the general case, lhs must be translatable to a SimpleExtraction to be simple-filterable.
if (!lhsExpression.isSimpleExtraction()) {
return null; return null;
} }
@ -364,28 +409,29 @@ public class Expressions
// Create a BoundRefKey that strips the extractionFn and compares __time as a number. // Create a BoundRefKey that strips the extractionFn and compares __time as a number.
final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC); final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);
if (kind == SqlKind.EQUALS) { switch (flippedKind) {
return rhsAligned case EQUALS:
? Bounds.interval(boundRefKey, rhsInterval) return rhsAligned
: Filtration.matchNothing(); ? Bounds.interval(boundRefKey, rhsInterval)
} else if (kind == SqlKind.NOT_EQUALS) { : Filtration.matchNothing();
return rhsAligned case NOT_EQUALS:
? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval)) return rhsAligned
: Filtration.matchEverything(); ? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval))
} else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) { : Filtration.matchEverything();
return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis())); case GREATER_THAN:
} else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) { return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
return rhsAligned case GREATER_THAN_OR_EQUAL:
? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis())) return rhsAligned
: Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis())); ? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
} else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) { : Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
return rhsAligned case LESS_THAN:
? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis())) return rhsAligned
: Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis())); ? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
} else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) { : Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis())); case LESS_THAN_OR_EQUAL:
} else { return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
throw new IllegalStateException("WTF?! Shouldn't have got here..."); default:
throw new IllegalStateException("WTF?! Shouldn't have got here...");
} }
} }
} }
@ -414,20 +460,27 @@ public class Expressions
final DimFilter filter; final DimFilter filter;
// Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator). // Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator).
if (kind == SqlKind.EQUALS) { switch (flippedKind) {
filter = Bounds.equalTo(boundRefKey, val); case EQUALS:
} else if (kind == SqlKind.NOT_EQUALS) { filter = Bounds.equalTo(boundRefKey, val);
filter = new NotDimFilter(Bounds.equalTo(boundRefKey, val)); break;
} else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) { case NOT_EQUALS:
filter = Bounds.greaterThan(boundRefKey, val); filter = new NotDimFilter(Bounds.equalTo(boundRefKey, val));
} else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL) || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) { break;
filter = Bounds.greaterThanOrEqualTo(boundRefKey, val); case GREATER_THAN:
} else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) { filter = Bounds.greaterThan(boundRefKey, val);
filter = Bounds.lessThan(boundRefKey, val); break;
} else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL) || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) { case GREATER_THAN_OR_EQUAL:
filter = Bounds.lessThanOrEqualTo(boundRefKey, val); filter = Bounds.greaterThanOrEqualTo(boundRefKey, val);
} else { break;
throw new IllegalStateException("WTF?! Shouldn't have got here..."); case LESS_THAN:
filter = Bounds.lessThan(boundRefKey, val);
break;
case LESS_THAN_OR_EQUAL:
filter = Bounds.lessThanOrEqualTo(boundRefKey, val);
break;
default:
throw new IllegalStateException("WTF?! Shouldn't have got here...");
} }
return filter; return filter;
@ -482,4 +535,86 @@ public class Expressions
? null ? null
: new ExpressionDimFilter(druidExpression.getExpression(), plannerContext.getExprMacroTable()); : new ExpressionDimFilter(druidExpression.getExpression(), plannerContext.getExprMacroTable());
} }
/**
 * Converts an expression to a Granularity, if possible. This is possible if, and only if, the expression
 * is a timestamp_floor function on the __time column with literal parameters for period, origin, and timeZone.
 *
 * @param expression expression to convert
 * @param macroTable macro table used to re-parse the expression string
 *
 * @return granularity, or null if the expression is not a timestamp_floor of __time
 */
@Nullable
public static Granularity toQueryGranularity(final DruidExpression expression, final ExprMacroTable macroTable)
{
  final TimestampFloorExprMacro.TimestampFloorExpr floorExpr = asTimestampFloorExpr(expression, macroTable);
  if (floorExpr == null) {
    return null;
  }

  // Only a floor directly on the __time column can be expressed as a query granularity.
  final String identifier = Parser.getIdentifierIfIdentifier(floorExpr.getArg());
  if (!Column.TIME_COLUMN_NAME.equals(identifier)) {
    return null;
  }

  return floorExpr.getGranularity();
}
/**
 * Parses a DruidExpression and returns it as a {@link TimestampFloorExprMacro.TimestampFloorExpr}
 * if macro expansion yields that concrete type, otherwise null.
 *
 * @param expression expression whose string form will be parsed
 * @param macroTable macro table supplying the timestamp_floor macro
 *
 * @return the parsed floor expression, or null if the expression is not a timestamp_floor call
 */
@Nullable
public static TimestampFloorExprMacro.TimestampFloorExpr asTimestampFloorExpr(
    final DruidExpression expression,
    final ExprMacroTable macroTable
)
{
  // Re-parse the expression string so that macro expansion produces the concrete expr class.
  final Expr parsed = Parser.parse(expression.getExpression(), macroTable);
  return parsed instanceof TimestampFloorExprMacro.TimestampFloorExpr
         ? (TimestampFloorExprMacro.TimestampFloorExpr) parsed
         : null;
}
/**
 * Build a filter for an expression like FLOOR(column TO granularity) [operator] rhsMillis.
 *
 * The returned filters compare the raw (un-floored) column against bucket boundaries, which lets
 * later filter optimization work on plain numeric bounds instead of extraction functions.
 *
 * @param column       column being floored (in practice, __time)
 * @param granularity  granularity the column is floored to
 * @param operatorKind comparison operator, already normalized so the floor is on the left-hand side
 * @param rhsMillis    right-hand-side timestamp, in milliseconds
 *
 * @return bound-based filter equivalent to the original floor comparison
 */
private static DimFilter buildTimeFloorFilter(
    final String column,
    final Granularity granularity,
    final SqlKind operatorKind,
    final long rhsMillis
)
{
  final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);
  // Bucket containing rhsMillis; its start is what FLOOR(column) would have to equal.
  final Interval rhsInterval = granularity.bucket(DateTimes.utc(rhsMillis));

  // Is rhs aligned on granularity boundaries?
  final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis;

  switch (operatorKind) {
    case EQUALS:
      // FLOOR(x) == rhs is only satisfiable when rhs is a bucket start; then x must lie in that bucket.
      return rhsAligned
             ? Bounds.interval(boundRefKey, rhsInterval)
             : Filtration.matchNothing();
    case NOT_EQUALS:
      // Complement of EQUALS: unaligned rhs can never equal a floored value, so everything matches.
      return rhsAligned
             ? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval))
             : Filtration.matchEverything();
    case GREATER_THAN:
      // FLOOR(x) > rhs means x is in a later bucket, i.e. x >= end of rhs's bucket. Alignment doesn't
      // matter here: when unaligned, FLOOR(x) can never equal rhs, so the bound is the same.
      return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
    case GREATER_THAN_OR_EQUAL:
      // Aligned: x may also be inside rhs's own bucket (x >= bucket start).
      // Unaligned: equality is impossible, so this degenerates to GREATER_THAN (x >= bucket end).
      return rhsAligned
             ? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
             : Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
    case LESS_THAN:
      // Aligned: x must be strictly before rhs's bucket (x < bucket start).
      // Unaligned: x anywhere in rhs's bucket also floors below rhs, so x < bucket end.
      return rhsAligned
             ? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
             : Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
    case LESS_THAN_OR_EQUAL:
      // FLOOR(x) <= rhs includes rhs's own bucket in either case, i.e. x < bucket end.
      return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
    default:
      throw new IllegalStateException("WTF?! Shouldn't have got here...");
  }
}
} }

View File

@ -126,7 +126,8 @@ public class CastOperatorConversion implements SqlOperatorConversion
// Floor to day when casting to DATE. // Floor to day when casting to DATE.
return TimeFloorOperatorConversion.applyTimestampFloor( return TimeFloorOperatorConversion.applyTimestampFloor(
typeCastExpression, typeCastExpression,
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()) new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()),
plannerContext.getExprMacroTable()
); );
} else { } else {
return typeCastExpression; return typeCastExpression;
@ -153,7 +154,8 @@ public class CastOperatorConversion implements SqlOperatorConversion
if (toType == SqlTypeName.DATE) { if (toType == SqlTypeName.DATE) {
return TimeFloorOperatorConversion.applyTimestampFloor( return TimeFloorOperatorConversion.applyTimestampFloor(
timestampExpression, timestampExpression,
new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()) new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()),
plannerContext.getExprMacroTable()
); );
} else if (toType == SqlTypeName.TIMESTAMP) { } else if (toType == SqlTypeName.TIMESTAMP) {
return timestampExpression; return timestampExpression;

View File

@ -73,7 +73,11 @@ public class FloorOperatorConversion implements SqlOperatorConversion
return null; return null;
} }
return TimeFloorOperatorConversion.applyTimestampFloor(druidExpression, granularity); return TimeFloorOperatorConversion.applyTimestampFloor(
druidExpression,
granularity,
plannerContext.getExprMacroTable()
);
} else { } else {
// WTF? FLOOR with 3 arguments? // WTF? FLOOR with 3 arguments?
return null; return null;

View File

@ -22,9 +22,10 @@ package io.druid.sql.calcite.expression.builtin;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.ExprMacroTable;
import io.druid.query.expression.TimestampFloorExprMacro;
import io.druid.sql.calcite.expression.DruidExpression; import io.druid.sql.calcite.expression.DruidExpression;
import io.druid.sql.calcite.expression.Expressions; import io.druid.sql.calcite.expression.Expressions;
import io.druid.sql.calcite.expression.ExtractionFns;
import io.druid.sql.calcite.expression.OperatorConversions; import io.druid.sql.calcite.expression.OperatorConversions;
import io.druid.sql.calcite.expression.SqlOperatorConversion; import io.druid.sql.calcite.expression.SqlOperatorConversion;
import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.Calcites;
@ -44,6 +45,7 @@ import org.joda.time.DateTimeZone;
import org.joda.time.Period; import org.joda.time.Period;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class TimeFloorOperatorConversion implements SqlOperatorConversion public class TimeFloorOperatorConversion implements SqlOperatorConversion
@ -58,28 +60,52 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
public static DruidExpression applyTimestampFloor( public static DruidExpression applyTimestampFloor(
final DruidExpression input, final DruidExpression input,
final PeriodGranularity granularity final PeriodGranularity granularity,
final ExprMacroTable macroTable
) )
{ {
Preconditions.checkNotNull(input, "input"); Preconditions.checkNotNull(input, "input");
Preconditions.checkNotNull(granularity, "granularity"); Preconditions.checkNotNull(granularity, "granularity");
return input.map( // Collapse floor chains if possible. Useful for constructs like CAST(FLOOR(__time TO QUARTER) AS DATE).
simpleExtraction -> simpleExtraction.cascade(ExtractionFns.fromQueryGranularity(granularity)), if (granularity.getPeriod().equals(Period.days(1))) {
expression -> DruidExpression.functionCall( final TimestampFloorExprMacro.TimestampFloorExpr floorExpr = Expressions.asTimestampFloorExpr(
"timestamp_floor", input,
ImmutableList.of( macroTable
expression, );
DruidExpression.stringLiteral(granularity.getPeriod().toString()),
DruidExpression.numberLiteral( if (floorExpr != null) {
granularity.getOrigin() == null ? null : granularity.getOrigin().getMillis() final PeriodGranularity inputGranularity = floorExpr.getGranularity();
), if (Objects.equals(inputGranularity.getTimeZone(), granularity.getTimeZone())
DruidExpression.stringLiteral(granularity.getTimeZone().toString()) && Objects.equals(inputGranularity.getOrigin(), granularity.getOrigin())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList()) && periodIsDayMultiple(inputGranularity.getPeriod())) {
) return input;
}
}
}
return DruidExpression.fromFunctionCall(
"timestamp_floor",
ImmutableList.of(
input.getExpression(),
DruidExpression.stringLiteral(granularity.getPeriod().toString()),
DruidExpression.numberLiteral(
granularity.getOrigin() == null ? null : granularity.getOrigin().getMillis()
),
DruidExpression.stringLiteral(granularity.getTimeZone().toString())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
); );
} }
/**
 * Returns whether flooring to {@code period} always lands on day boundaries: the period must have no
 * sub-day component (millis, seconds, minutes, hours) and at least one day-or-larger component.
 *
 * @param period period to inspect
 *
 * @return true if the period is a positive whole-day multiple
 */
private static boolean periodIsDayMultiple(final Period period)
{
  final boolean hasSubDayPart = period.getMillis() != 0
                                || period.getSeconds() != 0
                                || period.getMinutes() != 0
                                || period.getHours() != 0;
  final boolean hasDayOrLargerPart = period.getDays() > 0
                                     || period.getWeeks() > 0
                                     || period.getMonths() > 0
                                     || period.getYears() > 0;
  return !hasSubDayPart && hasDayOrLargerPart;
}
@Override @Override
public SqlOperator calciteOperator() public SqlOperator calciteOperator()
{ {
@ -117,7 +143,7 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
? DateTimeZone.forID(RexLiteral.stringValue(operands.get(3))) ? DateTimeZone.forID(RexLiteral.stringValue(operands.get(3)))
: plannerContext.getTimeZone(); : plannerContext.getTimeZone();
final PeriodGranularity granularity = new PeriodGranularity(period, origin, timeZone); final PeriodGranularity granularity = new PeriodGranularity(period, origin, timeZone);
return applyTimestampFloor(druidExpressions.get(0), granularity); return applyTimestampFloor(druidExpressions.get(0), granularity, plannerContext.getExprMacroTable());
} else { } else {
// Granularity is dynamic // Granularity is dynamic
return DruidExpression.fromFunctionCall("timestamp_floor", druidExpressions); return DruidExpression.fromFunctionCall("timestamp_floor", druidExpressions);

View File

@ -64,7 +64,6 @@ import io.druid.sql.calcite.aggregation.Aggregation;
import io.druid.sql.calcite.aggregation.DimensionExpression; import io.druid.sql.calcite.aggregation.DimensionExpression;
import io.druid.sql.calcite.expression.DruidExpression; import io.druid.sql.calcite.expression.DruidExpression;
import io.druid.sql.calcite.expression.Expressions; import io.druid.sql.calcite.expression.Expressions;
import io.druid.sql.calcite.expression.ExtractionFns;
import io.druid.sql.calcite.filtration.Filtration; import io.druid.sql.calcite.filtration.Filtration;
import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.planner.PlannerContext;
@ -537,13 +536,15 @@ public class DruidQuery
return toExprType.equals(fromExprType); return toExprType.equals(fromExprType);
} }
public VirtualColumns getVirtualColumns(final ExprMacroTable macroTable) public VirtualColumns getVirtualColumns(final ExprMacroTable macroTable, final boolean includeDimensions)
{ {
final List<VirtualColumn> retVal = new ArrayList<>(); final List<VirtualColumn> retVal = new ArrayList<>();
if (grouping != null) { if (grouping != null) {
for (DimensionExpression dimensionExpression : grouping.getDimensions()) { if (includeDimensions) {
retVal.addAll(dimensionExpression.getVirtualColumns(macroTable)); for (DimensionExpression dimensionExpression : grouping.getDimensions()) {
retVal.addAll(dimensionExpression.getVirtualColumns(macroTable));
}
} }
for (Aggregation aggregation : grouping.getAggregations()) { for (Aggregation aggregation : grouping.getAggregations()) {
@ -653,14 +654,15 @@ public class DruidQuery
queryGranularity = Granularities.ALL; queryGranularity = Granularities.ALL;
descending = false; descending = false;
} else if (grouping.getDimensions().size() == 1) { } else if (grouping.getDimensions().size() == 1) {
final DimensionSpec dimensionSpec = Iterables.getOnlyElement(grouping.getDimensions()).toDimensionSpec(); final DimensionExpression dimensionExpression = Iterables.getOnlyElement(grouping.getDimensions());
final Granularity gran = ExtractionFns.toQueryGranularity(dimensionSpec.getExtractionFn()); queryGranularity = Expressions.toQueryGranularity(
dimensionExpression.getDruidExpression(),
plannerContext.getExprMacroTable()
);
if (gran == null || !dimensionSpec.getDimension().equals(Column.TIME_COLUMN_NAME)) { if (queryGranularity == null) {
// Timeseries only applies if the single dimension is granular __time. // Timeseries only applies if the single dimension is granular __time.
return null; return null;
} else {
queryGranularity = gran;
} }
if (limitSpec != null) { if (limitSpec != null) {
@ -677,7 +679,7 @@ public class DruidQuery
// wouldn't matter anyway). // wouldn't matter anyway).
final OrderByColumnSpec firstOrderBy = limitSpec.getColumns().get(0); final OrderByColumnSpec firstOrderBy = limitSpec.getColumns().get(0);
if (firstOrderBy.getDimension().equals(dimensionSpec.getOutputName())) { if (firstOrderBy.getDimension().equals(dimensionExpression.getOutputName())) {
// Order by time. // Order by time.
descending = firstOrderBy.getDirection() == OrderByColumnSpec.Direction.DESCENDING; descending = firstOrderBy.getDirection() == OrderByColumnSpec.Direction.DESCENDING;
} else { } else {
@ -703,7 +705,7 @@ public class DruidQuery
dataSource, dataSource,
filtration.getQuerySegmentSpec(), filtration.getQuerySegmentSpec(),
descending, descending,
getVirtualColumns(plannerContext.getExprMacroTable()), getVirtualColumns(plannerContext.getExprMacroTable(), false),
filtration.getDimFilter(), filtration.getDimFilter(),
queryGranularity, queryGranularity,
grouping.getAggregatorFactories(), grouping.getAggregatorFactories(),
@ -768,7 +770,7 @@ public class DruidQuery
return new TopNQuery( return new TopNQuery(
dataSource, dataSource,
getVirtualColumns(plannerContext.getExprMacroTable()), getVirtualColumns(plannerContext.getExprMacroTable(), true),
dimensionSpec, dimensionSpec,
topNMetricSpec, topNMetricSpec,
limitSpec.getLimit(), limitSpec.getLimit(),
@ -798,7 +800,7 @@ public class DruidQuery
return new GroupByQuery( return new GroupByQuery(
dataSource, dataSource,
filtration.getQuerySegmentSpec(), filtration.getQuerySegmentSpec(),
getVirtualColumns(plannerContext.getExprMacroTable()), getVirtualColumns(plannerContext.getExprMacroTable(), true),
filtration.getDimFilter(), filtration.getDimFilter(),
Granularities.ALL, Granularities.ALL,
grouping.getDimensionSpecs(), grouping.getDimensionSpecs(),
@ -927,7 +929,7 @@ public class DruidQuery
Granularities.ALL, Granularities.ALL,
ImmutableList.of(new DefaultDimensionSpec(dummyColumn, dummyColumn)), ImmutableList.of(new DefaultDimensionSpec(dummyColumn, dummyColumn)),
metrics.stream().sorted().distinct().collect(Collectors.toList()), metrics.stream().sorted().distinct().collect(Collectors.toList()),
getVirtualColumns(plannerContext.getExprMacroTable()), getVirtualColumns(plannerContext.getExprMacroTable(), true),
pagingSpec, pagingSpec,
ImmutableSortedMap.copyOf(plannerContext.getQueryContext()) ImmutableSortedMap.copyOf(plannerContext.getQueryContext())
); );

View File

@ -59,7 +59,6 @@ import io.druid.query.extraction.CascadeExtractionFn;
import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.query.extraction.RegexDimExtractionFn;
import io.druid.query.extraction.SubstringDimExtractionFn; import io.druid.query.extraction.SubstringDimExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.AndDimFilter;
import io.druid.query.filter.BoundDimFilter; import io.druid.query.filter.BoundDimFilter;
import io.druid.query.filter.DimFilter; import io.druid.query.filter.DimFilter;
@ -3312,6 +3311,9 @@ public class CalciteQueryTest
.setDataSource(CalciteTests.DATASOURCE1) .setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity())) .setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL) .setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y','','UTC')", ValueType.LONG)
)
.setDimFilter( .setDimFilter(
BOUND( BOUND(
"cnt", "cnt",
@ -3323,14 +3325,7 @@ public class CalciteQueryTest
StringComparators.NUMERIC StringComparators.NUMERIC
) )
) )
.setDimensions(DIMS( .setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG)))
new ExtractionDimensionSpec(
"cnt",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
)
))
.setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0"))) .setAggregatorSpecs(AGGS(new CountAggregatorFactory("a0")))
.setContext(QUERY_CONTEXT_DEFAULT) .setContext(QUERY_CONTEXT_DEFAULT)
.build() .build()
@ -3827,13 +3822,15 @@ public class CalciteQueryTest
) )
.setInterval(QSS(Filtration.eternity())) .setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL) .setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"_d0:v",
"timestamp_floor(\"a0\",'PT1H','','UTC')",
ValueType.LONG
)
)
.setDimensions(DIMS( .setDimensions(DIMS(
new ExtractionDimensionSpec( new DefaultDimensionSpec("_d0:v", "_d0", ValueType.LONG),
"a0",
"_d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.HOUR, true)
),
new DefaultDimensionSpec("d0", "_d1", ValueType.STRING) new DefaultDimensionSpec("d0", "_d1", ValueType.STRING)
)) ))
.setAggregatorSpecs(AGGS( .setAggregatorSpecs(AGGS(
@ -5078,14 +5075,16 @@ public class CalciteQueryTest
.setDataSource(CalciteTests.DATASOURCE1) .setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity())) .setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL) .setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"d0:v",
"timestamp_floor(\"__time\",'P1Y','','UTC')",
ValueType.LONG
)
)
.setDimensions( .setDimensions(
DIMS( DIMS(
new ExtractionDimensionSpec( new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG),
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
),
new DefaultDimensionSpec("dim2", "d1") new DefaultDimensionSpec("dim2", "d1")
) )
) )
@ -5652,6 +5651,32 @@ public class CalciteQueryTest
); );
} }
@Test
public void testTimeseriesUsingFloorPlusCastAsDate() throws Exception
{
  // CAST(FLOOR(__time TO QUARTER) AS DATE) should collapse to plain FLOOR(__time TO QUARTER), since a
  // quarter floor already lands on day boundaries. The GROUP BY on that floor should then plan as a
  // Timeseries query with quarterly (P3M) granularity rather than a groupBy on an extracted dimension.
  testQuery(
      "SELECT SUM(cnt), dt FROM (\n"
      + " SELECT CAST(FLOOR(__time TO QUARTER) AS DATE) AS dt,\n"
      + " cnt FROM druid.foo\n"
      + ") AS x\n"
      + "GROUP BY dt\n"
      + "ORDER BY dt",
      ImmutableList.of(
          Druids.newTimeseriesQueryBuilder()
                .dataSource(CalciteTests.DATASOURCE1)
                .intervals(QSS(Filtration.eternity()))
                // Quarterly buckets expressed directly as a period granularity, not an extractionFn.
                .granularity(new PeriodGranularity(Period.months(3), null, DateTimeZone.UTC))
                .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
                .context(TIMESERIES_CONTEXT_DEFAULT)
                .build()
      ),
      // Expected: one row per year-starting quarter present in the test data (foo spans 2000 and 2001).
      ImmutableList.of(
          new Object[]{3L, D("2000-01-01")},
          new Object[]{3L, D("2001-01-01")}
      )
  );
}
@Test @Test
public void testTimeseriesDescending() throws Exception public void testTimeseriesDescending() throws Exception
{ {
@ -5858,16 +5883,14 @@ public class CalciteQueryTest
.setDataSource(CalciteTests.DATASOURCE1) .setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity())) .setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL) .setGranularity(Granularities.ALL)
.setDimensions( .setVirtualColumns(
DIMS( EXPRESSION_VIRTUAL_COLUMN(
new ExtractionDimensionSpec( "d0:v",
"__time", "timestamp_floor(\"__time\",'P1M','','UTC')",
"d0", ValueType.LONG
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
) )
) )
.setDimensions(DIMS(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG)))
.setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.setLimitSpec( .setLimitSpec(
new DefaultLimitSpec( new DefaultLimitSpec(
@ -5906,14 +5929,10 @@ public class CalciteQueryTest
.dataSource(CalciteTests.DATASOURCE1) .dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity())) .intervals(QSS(Filtration.eternity()))
.granularity(Granularities.ALL) .granularity(Granularities.ALL)
.dimension( .virtualColumns(
new ExtractionDimensionSpec( EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
) )
.dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
.aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
.threshold(1) .threshold(1)
@ -5943,14 +5962,10 @@ public class CalciteQueryTest
.dataSource(CalciteTests.DATASOURCE1) .dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity())) .intervals(QSS(Filtration.eternity()))
.granularity(Granularities.ALL) .granularity(Granularities.ALL)
.dimension( .virtualColumns(
new ExtractionDimensionSpec( EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
"__time",
"d0",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
) )
.dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
.aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
.metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC))
.threshold(1) .threshold(1)
@ -5976,15 +5991,17 @@ public class CalciteQueryTest
.setDataSource(CalciteTests.DATASOURCE1) .setDataSource(CalciteTests.DATASOURCE1)
.setInterval(QSS(Filtration.eternity())) .setInterval(QSS(Filtration.eternity()))
.setGranularity(Granularities.ALL) .setGranularity(Granularities.ALL)
.setVirtualColumns(
EXPRESSION_VIRTUAL_COLUMN(
"d1:v",
"timestamp_floor(\"__time\",'P1M','','UTC')",
ValueType.LONG
)
)
.setDimensions( .setDimensions(
DIMS( DIMS(
new DefaultDimensionSpec("dim2", "d0"), new DefaultDimensionSpec("dim2", "d0"),
new ExtractionDimensionSpec( new DefaultDimensionSpec("d1:v", "d1", ValueType.LONG)
"__time",
"d1",
ValueType.LONG,
new TimeFormatExtractionFn(null, null, null, Granularities.MONTH, true)
)
) )
) )
.setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))

View File

@ -22,12 +22,9 @@ package io.druid.sql.calcite.expression;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.granularity.PeriodGranularity;
import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprEval;
import io.druid.math.expr.Parser; import io.druid.math.expr.Parser;
import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.query.extraction.RegexDimExtractionFn;
import io.druid.query.extraction.TimeFormatExtractionFn;
import io.druid.segment.column.ValueType; import io.druid.segment.column.ValueType;
import io.druid.server.security.AuthTestUtils; import io.druid.server.security.AuthTestUtils;
import io.druid.sql.calcite.expression.builtin.DateTruncOperatorConversion; import io.druid.sql.calcite.expression.builtin.DateTruncOperatorConversion;
@ -403,19 +400,7 @@ public class ExpressionsTest
rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.TIMESTAMP)), rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.TIMESTAMP)),
rexBuilder.makeLiteral("America/Los_Angeles") rexBuilder.makeLiteral("America/Los_Angeles")
), ),
DruidExpression.of( DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"),
SimpleExtraction.of(
"t",
new TimeFormatExtractionFn(
null,
null,
null,
new PeriodGranularity(Period.days(1), null, LOS_ANGELES),
true
)
),
"timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"
),
DateTimes.of("2000-02-02T08:00:00").getMillis() DateTimes.of("2000-02-02T08:00:00").getMillis()
); );
} }
@ -431,13 +416,7 @@ public class ExpressionsTest
inputRef("t"), inputRef("t"),
rexBuilder.makeFlag(TimeUnitRange.YEAR) rexBuilder.makeFlag(TimeUnitRange.YEAR)
), ),
DruidExpression.of( DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y','','UTC')"),
SimpleExtraction.of(
"t",
new TimeFormatExtractionFn(null, null, null, Granularities.YEAR, true)
),
"timestamp_floor(\"t\",'P1Y','','UTC')"
),
DateTimes.of("2000").getMillis() DateTimes.of("2000").getMillis()
); );
} }
@ -735,10 +714,7 @@ public class ExpressionsTest
typeFactory.createSqlType(SqlTypeName.DATE), typeFactory.createSqlType(SqlTypeName.DATE),
inputRef("t") inputRef("t")
), ),
DruidExpression.of( DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
"timestamp_floor(\"t\",'P1D','','UTC')"
),
DateTimes.of("2000-02-03").getMillis() DateTimes.of("2000-02-03").getMillis()
); );
@ -779,10 +755,7 @@ public class ExpressionsTest
inputRef("t") inputRef("t")
) )
), ),
DruidExpression.of( DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)),
"timestamp_floor(\"t\",'P1D','','UTC')"
),
DateTimes.of("2000-02-03").getMillis() DateTimes.of("2000-02-03").getMillis()
); );
} }