SQL: Refactor binary date time functions (#47786)

Refactor DateTrunc and DatePart to use separate Pipe classes, which
allows the BinaryDateOperation enum to be removed.

(cherry picked from commit a6075e7718dff94a90dbc0795dd924dcb7641092)
Marios Trivyzas 2019-10-10 13:30:23 +02:00
parent 6a4bf5de2c
commit c1f30e34ff
15 changed files with 379 additions and 199 deletions
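For context, the refactoring follows a standard pattern: enum-driven dispatch is replaced by an abstract factory method that each subclass implements. The sketch below only illustrates that shape with simplified stand-in names; it is not the actual Elasticsearch API, whose real changes follow in the diff.

// Sketch of the pattern; all names here are simplified stand-ins.
interface Proc {
    Object process(Object left, Object right);
}

// Before: one pipe class serves both functions and switches on an enum.
class EnumDispatchPipe {
    enum Op { TRUNC, PART }

    private final Op op;

    EnumDispatchPipe(Op op) {
        this.op = op;
    }

    Proc asProcessor() {
        // Every new operation means extending the enum and this switch.
        switch (op) {
            case TRUNC: return (l, r) -> "truncated";
            case PART:  return (l, r) -> "extracted part";
            default: throw new IllegalStateException("unknown op");
        }
    }
}

// After: the base class defers to an abstract factory method and each
// concrete pipe returns its own processor, so the enum disappears.
abstract class PolymorphicPipe {
    final Proc asProcessor() {
        return makeProcessor();
    }

    protected abstract Proc makeProcessor();
}

class TruncPipe extends PolymorphicPipe {
    @Override
    protected Proc makeProcessor() {
        return (l, r) -> "truncated";
    }
}

class PartPipe extends PolymorphicPipe {
    @Override
    protected Proc makeProcessor() {
        return (l, r) -> "extracted part";
    }
}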

View File

@ -20,19 +20,15 @@ import java.util.Objects;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isDate;
import static org.elasticsearch.xpack.sql.expression.TypeResolutions.isString;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation;
import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder;
public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
private final ZoneId zoneId;
private final BinaryDateOperation operation;
public BinaryDateTimeFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId,
BinaryDateOperation operation) {
public BinaryDateTimeFunction(Source source, Expression datePart, Expression timestamp, ZoneId zoneId) {
super(source, datePart, timestamp);
this.zoneId = zoneId;
this.operation = operation;
}
@Override
@ -47,7 +43,7 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
if (datePartValue != null && resolveDateTimeField(datePartValue) == false) {
List<String> similar = findSimilarDateTimeFields(datePartValue);
if (similar.isEmpty()) {
return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases, found value [{}]",
return new TypeResolution(format(null, "first argument of [{}] must be one of {} or their aliases; found value [{}]",
sourceText(),
validDateTimeFieldValues(),
Expressions.name(left())));
@ -78,9 +74,11 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
@Override
protected Pipe makePipe() {
return new BinaryDateTimePipe(source(), this, Expressions.pipe(left()), Expressions.pipe(right()), zoneId, operation);
return createPipe(Expressions.pipe(left()), Expressions.pipe(right()), zoneId);
}
protected abstract Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId);
@Override
public Nullability nullable() {
return Nullability.TRUE;
@ -101,7 +99,7 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), zoneId, operation);
return Objects.hash(super.hashCode(), zoneId);
}
@Override
@ -116,6 +114,6 @@ public abstract class BinaryDateTimeFunction extends BinaryScalarFunction {
return false;
}
BinaryDateTimeFunction that = (BinaryDateTimeFunction) o;
return zoneId.equals(that.zoneId) && operation == that.operation;
return zoneId.equals(that.zoneId);
}
}

View File

@ -9,50 +9,34 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
import java.util.Objects;
public class BinaryDateTimePipe extends BinaryPipe {
public abstract class BinaryDateTimePipe extends BinaryPipe {
private final ZoneId zoneId;
private final BinaryDateTimeProcessor.BinaryDateOperation operation;
public BinaryDateTimePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId,
BinaryDateTimeProcessor.BinaryDateOperation operation) {
public BinaryDateTimePipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
super(source, expression, left, right);
this.zoneId = zoneId;
this.operation = operation;
}
ZoneId zoneId() {
return zoneId;
}
BinaryDateTimeProcessor.BinaryDateOperation operation() {
return operation;
}
@Override
protected NodeInfo<BinaryDateTimePipe> info() {
return NodeInfo.create(this, BinaryDateTimePipe::new, expression(), left(), right(), zoneId, operation);
}
@Override
protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
return new BinaryDateTimePipe(source(), expression(), left, right, zoneId, operation);
}
@Override
public Processor asProcessor() {
return BinaryDateTimeProcessor.asProcessor(operation, left().asProcessor(), right().asProcessor(), zoneId);
return makeProcessor(left().asProcessor(), right().asProcessor(), zoneId);
}
protected abstract Processor makeProcessor(Processor left, Processor right, ZoneId zoneId);
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), zoneId, operation);
return Objects.hash(super.hashCode(), zoneId);
}
@Override
@ -67,7 +51,6 @@ public class BinaryDateTimePipe extends BinaryPipe {
return false;
}
BinaryDateTimePipe that = (BinaryDateTimePipe) o;
return Objects.equals(zoneId, that.zoneId) &&
operation == that.operation;
return Objects.equals(zoneId, that.zoneId);
}
}

View File

@ -15,16 +15,8 @@ import java.io.IOException;
import java.time.ZoneId;
import java.util.Objects;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation.TRUNC;
public abstract class BinaryDateTimeProcessor extends BinaryProcessor {
// TODO: Remove and in favour of inheritance (subclasses which implement abstract methods)
public enum BinaryDateOperation {
TRUNC,
PART;
}
private final ZoneId zoneId;
public BinaryDateTimeProcessor(Processor source1, Processor source2, ZoneId zoneId) {
@ -48,28 +40,24 @@ public abstract class BinaryDateTimeProcessor extends BinaryProcessor {
@Override
protected abstract Object doProcess(Object left, Object right);
public static BinaryDateTimeProcessor asProcessor(BinaryDateOperation operation, Processor left, Processor right, ZoneId zoneId) {
if (operation == TRUNC) {
return new DateTruncProcessor(left, right, zoneId);
} else {
return new DatePartProcessor(left, right, zoneId);
}
}
@Override
public int hashCode() {
return Objects.hash(zoneId);
return Objects.hash(left(), right(), zoneId);
}
@Override
public boolean equals(Object o) {
if (this == o) {
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (o == null || getClass() != o.getClass()) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BinaryDateTimeProcessor that = (BinaryDateTimeProcessor) o;
return zoneId.equals(that.zoneId);
BinaryDateTimeProcessor other = (BinaryDateTimeProcessor) obj;
return Objects.equals(left(), other.left())
&& Objects.equals(right(), other.right())
&& Objects.equals(zoneId(), other.zoneId());
}
}
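With the operation enum gone, two processors are distinguished by their concrete class, their two child processors, and the zone, which is exactly what the reworked hashCode/equals above cover. The hypothetical check below borrows ConstantProcessor from the tests further down in this diff; the driver class itself is not part of the commit.

package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

// Hypothetical driver, not part of the commit.
public class ProcessorEqualityExample {
    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.now(ZoneOffset.UTC);
        DateTruncProcessor days = new DateTruncProcessor(
            new ConstantProcessor("day"), new ConstantProcessor(ts), ZoneOffset.UTC);
        DateTruncProcessor weeks = new DateTruncProcessor(
            new ConstantProcessor("week"), new ConstantProcessor(ts), ZoneOffset.UTC);
        // Same class and zone, different left-hand child: not equal.
        System.out.println(days.equals(weeks)); // false
    }
}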

View File

@ -9,6 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Nullability;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataType;
@ -78,7 +79,7 @@ public class DatePart extends BinaryDateTimeFunction {
}
public DatePart(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
super(source, truncateTo, timestamp, zoneId, BinaryDateTimeProcessor.BinaryDateOperation.PART);
super(source, truncateTo, timestamp, zoneId);
}
@Override
@ -101,16 +102,6 @@ public class DatePart extends BinaryDateTimeFunction {
return Nullability.TRUE;
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected String scriptMethodName() {
return "datePart";
@ -121,6 +112,21 @@ public class DatePart extends BinaryDateTimeFunction {
return DatePartProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId) {
return new DatePartPipe(source(), this, left, right, zoneId);
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected List<String> validDateTimeFieldValues() {
return Part.VALID_VALUES;

View File

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
public class DatePartPipe extends BinaryDateTimePipe {
public DatePartPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
super(source, expression, left, right, zoneId);
}
@Override
protected NodeInfo<DatePartPipe> info() {
return NodeInfo.create(this, DatePartPipe::new, expression(), left(), right(), zoneId());
}
@Override
protected DatePartPipe replaceChildren(Pipe left, Pipe right) {
return new DatePartPipe(source(), expression(), left, right, zoneId());
}
@Override
protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
return new DatePartProcessor(left, right, zoneId);
}
}

View File

@ -34,36 +34,36 @@ public class DatePartProcessor extends BinaryDateTimeProcessor {
}
@Override
protected Object doProcess(Object left, Object right) {
return process(left, right, zoneId());
protected Object doProcess(Object part, Object timestamp) {
return process(part, timestamp, zoneId());
}
/**
* Used in Painless scripting
*/
public static Object process(Object source1, Object source2, ZoneId zoneId) {
if (source1 == null || source2 == null) {
public static Object process(Object part, Object timestamp, ZoneId zoneId) {
if (part == null || timestamp == null) {
return null;
}
if (source1 instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", source1);
if (part instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", part);
}
Part datePartField = Part.resolve((String) source1);
Part datePartField = Part.resolve((String) part);
if (datePartField == null) {
List<String> similar = Part.findSimilar((String) source1);
List<String> similar = Part.findSimilar((String) part);
if (similar.isEmpty()) {
throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
Part.values(), source1);
Part.values(), part);
} else {
throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for extraction; " +
"did you mean {}?", source1, similar);
"did you mean {}?", part, similar);
}
}
if (source2 instanceof ZonedDateTime == false) {
throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", source2);
if (timestamp instanceof ZonedDateTime == false) {
throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
}
return datePartField.extract(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
return datePartField.extract(((ZonedDateTime) timestamp).withZoneSameInstant(zoneId));
}
}
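Because process is a public static helper (it is what the Painless script calls), it can be exercised directly. A minimal usage sketch, under the assumption that Part.resolve accepts the lowercase field names used elsewhere in this diff:

package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

// Hypothetical driver, not part of the commit.
public class DatePartExample {
    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.of(2019, 10, 10, 13, 30, 23, 0, ZoneOffset.UTC);
        // Extract the year component of the timestamp.
        Object year = DatePartProcessor.process("year", ts, ZoneOffset.UTC);
        System.out.println(year); // expected: 2019
    }
}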

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Nullability;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataType;
@ -21,9 +22,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.BinaryDateTimeProcessor.BinaryDateOperation.TRUNC;
import java.util.function.UnaryOperator;
public class DateTrunc extends BinaryDateTimeFunction {
@ -37,7 +36,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate().atStartOfDay(dt.getZone());
},"millennia"),
},"millennia"),
CENTURY(dt -> {
int year = dt.getYear();
int firstYearOfCentury = year - (year % 100);
@ -46,7 +45,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate().atStartOfDay(dt.getZone());
}, "centuries"),
}, "centuries"),
DECADE(dt -> {
int year = dt.getYear();
int firstYearOfDecade = year - (year % 10);
@ -55,7 +54,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate().atStartOfDay(dt.getZone());
}, "decades"),
}, "decades"),
YEAR(dt -> dt
.with(ChronoField.MONTH_OF_YEAR, 1)
.with(ChronoField.DAY_OF_MONTH, 1)
@ -68,14 +67,14 @@ public class DateTrunc extends BinaryDateTimeFunction {
.with(ChronoField.MONTH_OF_YEAR, firstMonthOfQuarter)
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate().atStartOfDay(dt.getZone());
}, "quarters", "qq", "q"),
}, "quarters", "qq", "q"),
MONTH(dt -> dt
.with(ChronoField.DAY_OF_MONTH, 1)
.toLocalDate().atStartOfDay(dt.getZone()),
"months", "mm", "m"),
WEEK(dt -> dt
.with(ChronoField.DAY_OF_WEEK, 1)
.toLocalDate().atStartOfDay(dt.getZone()),
"weeks", "wk", "ww"),
DAY(dt -> dt.toLocalDate().atStartOfDay(dt.getZone()), "days", "dd", "d"),
HOUR(dt -> {
@ -89,16 +88,16 @@ public class DateTrunc extends BinaryDateTimeFunction {
return dt.toLocalDate().atStartOfDay(dt.getZone())
.with(ChronoField.HOUR_OF_DAY, hour)
.with(ChronoField.MINUTE_OF_HOUR, minute);
}, "minutes", "mi", "n"),
}, "minutes", "mi", "n"),
SECOND(dt -> dt.with(ChronoField.NANO_OF_SECOND, 0), "seconds", "ss", "s"),
MILLISECOND(dt -> {
int micros = dt.get(ChronoField.MICRO_OF_SECOND);
return dt.with(ChronoField.MILLI_OF_SECOND, (micros / 1000));
}, "milliseconds", "ms"),
}, "milliseconds", "ms"),
MICROSECOND(dt -> {
int nanos = dt.getNano();
return dt.with(ChronoField.MICRO_OF_SECOND, (nanos / 1000));
}, "microseconds", "mcs"),
}, "microseconds", "mcs"),
NANOSECOND(dt -> dt, "nanoseconds", "ns");
private static final Map<String, Part> NAME_TO_PART;
@ -109,10 +108,10 @@ public class DateTrunc extends BinaryDateTimeFunction {
VALID_VALUES = DateTimeField.initializeValidValues(values());
}
private Function<ZonedDateTime, ZonedDateTime> truncateFunction;
private UnaryOperator<ZonedDateTime> truncateFunction;
private Set<String> aliases;
Part(Function<ZonedDateTime, ZonedDateTime> truncateFunction, String... aliases) {
Part(UnaryOperator<ZonedDateTime> truncateFunction, String... aliases) {
this.truncateFunction = truncateFunction;
this.aliases = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(aliases)));
}
@ -136,7 +135,7 @@ public class DateTrunc extends BinaryDateTimeFunction {
}
public DateTrunc(Source source, Expression truncateTo, Expression timestamp, ZoneId zoneId) {
super(source, truncateTo, timestamp, zoneId, TRUNC);
super(source, truncateTo, timestamp, zoneId);
}
@Override
@ -159,16 +158,6 @@ public class DateTrunc extends BinaryDateTimeFunction {
return Nullability.TRUE;
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected String scriptMethodName() {
return "dateTrunc";
@ -179,6 +168,21 @@ public class DateTrunc extends BinaryDateTimeFunction {
return DateTruncProcessor.process(left().fold(), right().fold(), zoneId());
}
@Override
protected Pipe createPipe(Pipe left, Pipe right, ZoneId zoneId) {
return new DateTruncPipe(source(), this, left, right, zoneId);
}
@Override
protected boolean resolveDateTimeField(String dateTimeField) {
return Part.resolve(dateTimeField) != null;
}
@Override
protected List<String> findSimilarDateTimeFields(String dateTimeField) {
return Part.findSimilar(dateTimeField);
}
@Override
protected List<String> validDateTimeFieldValues() {
return Part.VALID_VALUES;

View File

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.expression.gen.processor.Processor;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
public class DateTruncPipe extends BinaryDateTimePipe {
public DateTruncPipe(Source source, Expression expression, Pipe left, Pipe right, ZoneId zoneId) {
super(source, expression, left, right, zoneId);
}
@Override
protected NodeInfo<DateTruncPipe> info() {
return NodeInfo.create(this, DateTruncPipe::new, expression(), left(), right(), zoneId());
}
@Override
protected DateTruncPipe replaceChildren(Pipe left, Pipe right) {
return new DateTruncPipe(source(), expression(), left, right, zoneId());
}
@Override
protected Processor makeProcessor(Processor left, Processor right, ZoneId zoneId) {
return new DateTruncProcessor(left, right, zoneId);
}
}

View File

@ -34,36 +34,36 @@ public class DateTruncProcessor extends BinaryDateTimeProcessor {
}
@Override
protected Object doProcess(Object left, Object right) {
return process(left, right, zoneId());
protected Object doProcess(Object truncateTo, Object timestamp) {
return process(truncateTo, timestamp, zoneId());
}
/**
* Used in Painless scripting
*/
public static Object process(Object source1, Object source2, ZoneId zoneId) {
if (source1 == null || source2 == null) {
public static Object process(Object truncateTo, Object timestamp, ZoneId zoneId) {
if (truncateTo == null || timestamp == null) {
return null;
}
if (source1 instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", source1);
if (truncateTo instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", truncateTo);
}
Part truncateDateField = Part.resolve((String) source1);
Part truncateDateField = Part.resolve((String) truncateTo);
if (truncateDateField == null) {
List<String> similar = Part.findSimilar((String) source1);
List<String> similar = Part.findSimilar((String) truncateTo);
if (similar.isEmpty()) {
throw new SqlIllegalArgumentException("A value of {} or their aliases is required; received [{}]",
Part.values(), source1);
Part.values(), truncateTo);
} else {
throw new SqlIllegalArgumentException("Received value [{}] is not valid date part for truncation; " +
"did you mean {}?", source1, similar);
"did you mean {}?", truncateTo, similar);
}
}
if (source2 instanceof ZonedDateTime == false) {
throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", source2);
if (timestamp instanceof ZonedDateTime == false) {
throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
}
return truncateDateField.truncate(((ZonedDateTime) source2).withZoneSameInstant(zoneId));
return truncateDateField.truncate(((ZonedDateTime) timestamp).withZoneSameInstant(zoneId));
}
}
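The truncation helper works the same way. A worked example, again assuming the lowercase field names resolve: truncating 2019-10-10 to the decade yields the start of 2010.

package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

// Hypothetical driver, not part of the commit.
public class DateTruncExample {
    public static void main(String[] args) {
        ZonedDateTime ts = ZonedDateTime.of(2019, 10, 10, 13, 30, 23, 0, ZoneOffset.UTC);
        // DECADE keeps the first year of the decade and resets everything below it.
        Object truncated = DateTruncProcessor.process("decade", ts, ZoneOffset.UTC);
        System.out.println(truncated); // expected: 2010-01-01T00:00Z
    }
}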

View File

@ -80,7 +80,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
private LogicalPlan incompatibleAccept(String sql) {
return accept(incompatible(), sql);
}
public void testMissingIndex() {
assertEquals("1:17: Unknown index [missing]", error(IndexResolution.notFound("missing"), "SELECT foo FROM missing"));
}
@ -96,11 +96,11 @@ public class VerifierErrorMessagesTests extends ESTestCase {
public void testMissingColumnWithWildcard() {
assertEquals("1:8: Unknown column [xxx]", error("SELECT xxx.* FROM test"));
}
public void testMisspelledColumnWithWildcard() {
assertEquals("1:8: Unknown column [tex], did you mean [text]?", error("SELECT tex.* FROM test"));
}
public void testColumnWithNoSubFields() {
assertEquals("1:8: Cannot determine columns for [text.*]", error("SELECT text.* FROM test"));
}
@ -131,14 +131,14 @@ public class VerifierErrorMessagesTests extends ESTestCase {
"line 1:22: Unknown column [c]\n" +
"line 1:25: Unknown column [tex], did you mean [text]?", error("SELECT bool, a, b.*, c, tex.* FROM test"));
}
public void testMultipleColumnsWithWildcard2() {
assertEquals("1:8: Unknown column [tex], did you mean [text]?\n" +
"line 1:21: Unknown column [a]\n" +
"line 1:24: Unknown column [dat], did you mean [date]?\n" +
"line 1:31: Unknown column [c]", error("SELECT tex.*, bool, a, dat.*, c FROM test"));
}
public void testMultipleColumnsWithWildcard3() {
assertEquals("1:8: Unknown column [ate], did you mean [date]?\n" +
"line 1:21: Unknown column [keyw], did you mean [keyword]?\n" +
@ -210,7 +210,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
"type [keyword]", error("SELECT DATE_TRUNC(keyword, keyword) FROM test"));
assertEquals("1:8: first argument of [DATE_TRUNC('invalid', keyword)] must be one of [MILLENNIUM, CENTURY, DECADE, " + "" +
"YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND] " +
"or their aliases, found value ['invalid']",
"or their aliases; found value ['invalid']",
error("SELECT DATE_TRUNC('invalid', keyword) FROM test"));
assertEquals("1:8: Unknown value ['millenioum'] for first argument of [DATE_TRUNC('millenioum', keyword)]; " +
"did you mean [millennium, millennia]?",
@ -237,7 +237,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
"type [keyword]", error("SELECT DATE_PART(keyword, keyword) FROM test"));
assertEquals("1:8: first argument of [DATE_PART('invalid', keyword)] must be one of [YEAR, QUARTER, MONTH, DAYOFYEAR, " +
"DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, NANOSECOND, TZOFFSET] " +
"or their aliases, found value ['invalid']",
"or their aliases; found value ['invalid']",
error("SELECT DATE_PART('invalid', keyword) FROM test"));
assertEquals("1:8: Unknown value ['tzofset'] for first argument of [DATE_PART('tzofset', keyword)]; " +
"did you mean [tzoffset]?",
@ -616,13 +616,13 @@ public class VerifierErrorMessagesTests extends ESTestCase {
"No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead",
error("SELECT * FROM test WHERE text LIKE 'foo'"));
}
public void testInvalidTypeForRLikeMatch() {
assertEquals("1:26: [text RLIKE 'foo'] cannot operate on field of data type [text]: " +
"No keyword/multi-field defined exact matches for [text]; define one or use MATCH/QUERY instead",
error("SELECT * FROM test WHERE text RLIKE 'foo'"));
}
public void testAllowCorrectFieldsInIncompatibleMappings() {
assertNotNull(incompatibleAccept("SELECT languages FROM \"*\""));
}
@ -746,32 +746,32 @@ public class VerifierErrorMessagesTests extends ESTestCase {
assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test"));
}
public void testHistogramNotInGroupingWithCount() {
assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h, COUNT(*) FROM test"));
}
public void testHistogramNotInGroupingWithMaxFirst() {
assertEquals("1:19: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT MAX(date), HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test"));
}
public void testHistogramWithoutAliasNotInGrouping() {
assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) FROM test"));
}
public void testTwoHistogramsNotInGrouping() {
assertEquals("1:48: [HISTOGRAM(date, INTERVAL 1 DAY)] needs to be part of the grouping",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h, HISTOGRAM(date, INTERVAL 1 DAY) FROM test GROUP BY h"));
}
public void testHistogramNotInGrouping_WithGroupByField() {
assertEquals("1:8: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) FROM test GROUP BY date"));
}
public void testScalarOfHistogramNotInGrouping() {
assertEquals("1:14: [HISTOGRAM(date, INTERVAL 1 MONTH)] needs to be part of the grouping",
error("SELECT MONTH(HISTOGRAM(date, INTERVAL 1 MONTH)) FROM test"));

View File

@ -0,0 +1,131 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.BinaryPipe;
import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
import org.elasticsearch.xpack.sql.tree.AbstractNodeTestCase;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.tree.SourceTests;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.sql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource;
public class DatePartPipeTests extends AbstractNodeTestCase<DatePartPipe, Pipe> {
@Override
protected DatePartPipe randomInstance() {
return randomDatePartPipe();
}
private Expression randomDatePartPipeExpression() {
return randomDatePartPipe().expression();
}
public static DatePartPipe randomDatePartPipe() {
return (DatePartPipe) new DatePart(
randomSource(),
randomStringLiteral(),
randomDatetimeLiteral(),
randomZone())
.makePipe();
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (the two parameters of the binary function) which are tested separately
DatePartPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDatePartPipeExpression);
DatePartPipe newB = new DatePartPipe(
b1.source(),
newExpression,
b1.left(),
b1.right(),
b1.zoneId());
assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
DatePartPipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
newB = new DatePartPipe(
newLoc,
b2.expression(),
b2.left(),
b2.right(),
b2.zoneId());
assertEquals(newB,
b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
}
@Override
public void testReplaceChildren() {
DatePartPipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral)));
ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
DatePartPipe newB = new DatePartPipe( b.source(), b.expression(), b.left(), b.right(), newZoneId);
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), b.right());
transformed = newB.replaceChildren(b.left(), newRight);
assertEquals(transformed.left(), b.left());
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
transformed = newB.replaceChildren(newLeft, newRight);
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.source(), b.source());
assertEquals(transformed.expression(), b.expression());
assertEquals(transformed.right(), newRight);
}
@Override
protected DatePartPipe mutate(DatePartPipe instance) {
List<Function<DatePartPipe, DatePartPipe>> randoms = new ArrayList<>();
randoms.add(f -> new DatePartPipe(f.source(), f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
randoms.add(f -> new DatePartPipe(f.source(), f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
randoms.add(f -> new DatePartPipe(f.source(), f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
return randomFrom(randoms).apply(instance);
}
@Override
protected DatePartPipe copy(DatePartPipe instance) {
return new DatePartPipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.zoneId());
}
}

View File

@ -15,7 +15,6 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.ConstantProcessor;
import org.elasticsearch.xpack.sql.tree.Source;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import static org.elasticsearch.xpack.sql.expression.Literal.NULL;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.l;
@ -27,7 +26,7 @@ public class DatePartProcessorTests extends AbstractSqlWireSerializingTestCase<D
public static DatePartProcessor randomDatePartProcessor() {
return new DatePartProcessor(
new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
new ConstantProcessor(ZonedDateTime.now()),
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
randomZone());
}
@ -50,7 +49,7 @@ public class DatePartProcessorTests extends AbstractSqlWireSerializingTestCase<D
protected DatePartProcessor mutateInstance(DatePartProcessor instance) {
return new DatePartProcessor(
new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
new ConstantProcessor(ZonedDateTime.now()),
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone));
}
@ -72,7 +71,7 @@ public class DatePartProcessorTests extends AbstractSqlWireSerializingTestCase<D
siae = expectThrows(SqlIllegalArgumentException.class,
() -> new DatePart(Source.EMPTY, l("dayfyear"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertEquals("Received value [dayfyear] is not valid date part for extraction; did you mean [dayofyear, year]?",
siae.getMessage());
}
public void testWithNulls() {

View File

@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
import org.elasticsearch.xpack.sql.util.DateUtils;
import java.time.Clock;
import java.time.Duration;
import java.time.OffsetTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
@ -39,4 +41,9 @@ public class DateTimeTestUtils {
public static OffsetTime time(int hour, int minute, int second, int nano) {
return OffsetTime.of(hour, minute, second, nano, ZoneOffset.UTC);
}
static ZonedDateTime nowWithMillisResolution() {
Clock millisResolutionClock = Clock.tick(Clock.systemUTC(), Duration.ofMillis(1));
return ZonedDateTime.now(millisResolutionClock);
}
}
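Clock.tick truncates every reading of the wrapped clock to the given duration, so nowWithMillisResolution returns timestamps with no sub-millisecond component (ZonedDateTime.now() on newer JDKs can carry microsecond precision, which the serialization round-trip tests above would rather not depend on; that motivation is inferred, not stated in the commit). A standalone illustration:

import java.time.Clock;
import java.time.Duration;
import java.time.ZonedDateTime;

// Standalone illustration of the Clock.tick behaviour used above.
public class MillisClockExample {
    public static void main(String[] args) {
        Clock millisClock = Clock.tick(Clock.systemUTC(), Duration.ofMillis(1));
        ZonedDateTime now = ZonedDateTime.now(millisClock);
        // The nano-of-second field is always a whole number of milliseconds.
        System.out.println(now.getNano() % 1_000_000); // 0
    }
}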

View File

@ -22,13 +22,14 @@ import java.util.Objects;
import java.util.function.Function;
import static org.elasticsearch.xpack.sql.expression.Expressions.pipe;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomDatetimeLiteral;
import static org.elasticsearch.xpack.sql.expression.function.scalar.FunctionTestUtils.randomStringLiteral;
import static org.elasticsearch.xpack.sql.tree.SourceTests.randomSource;
public class BinaryDateTimePipeTests extends AbstractNodeTestCase<BinaryDateTimePipe, Pipe> {
public class DateTruncPipeTests extends AbstractNodeTestCase<DateTruncPipe, Pipe> {
@Override
protected BinaryDateTimePipe randomInstance() {
protected DateTruncPipe randomInstance() {
return randomDateTruncPipe();
}
@ -36,52 +37,49 @@ public class BinaryDateTimePipeTests extends AbstractNodeTestCase<BinaryDateTime
return randomDateTruncPipe().expression();
}
public static BinaryDateTimePipe randomDateTruncPipe() {
return (BinaryDateTimePipe) new DateTrunc(
randomSource(),
randomStringLiteral(),
randomStringLiteral(),
randomZone())
.makePipe();
public static DateTruncPipe randomDateTruncPipe() {
return (DateTruncPipe) new DateTrunc(
randomSource(),
randomStringLiteral(),
randomDatetimeLiteral(),
randomZone())
.makePipe();
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (the two parameters of the binary function) which are tested separately
BinaryDateTimePipe b1 = randomInstance();
DateTruncPipe b1 = randomInstance();
Expression newExpression = randomValueOtherThan(b1.expression(), this::randomDateTruncPipeExpression);
BinaryDateTimePipe newB = new BinaryDateTimePipe(
b1.source(),
newExpression,
b1.left(),
b1.right(),
b1.zoneId(),
b1.operation());
DateTruncPipe newB = new DateTruncPipe(
b1.source(),
newExpression,
b1.left(),
b1.right(),
b1.zoneId());
assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? newExpression : v, Expression.class));
BinaryDateTimePipe b2 = randomInstance();
DateTruncPipe b2 = randomInstance();
Source newLoc = randomValueOtherThan(b2.source(), SourceTests::randomSource);
newB = new BinaryDateTimePipe(
newLoc,
b2.expression(),
b2.left(),
b2.right(),
b2.zoneId(),
b2.operation());
newB = new DateTruncPipe(
newLoc,
b2.expression(),
b2.left(),
b2.right(),
b2.zoneId());
assertEquals(newB,
b2.transformPropertiesOnly(v -> Objects.equals(v, b2.source()) ? newLoc : v, Source.class));
}
@Override
public void testReplaceChildren() {
BinaryDateTimePipe b = randomInstance();
DateTruncPipe b = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(b.left(), FunctionTestUtils::randomStringLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(b.right(), FunctionTestUtils::randomDatetimeLiteral)));
ZoneId newZoneId = randomValueOtherThan(b.zoneId(), ESTestCase::randomZone);
BinaryDateTimePipe newB = new BinaryDateTimePipe(
b.source(), b.expression(), b.left(), b.right(), newZoneId, randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values()));
DateTruncPipe newB = new DateTruncPipe( b.source(), b.expression(), b.left(), b.right(), newZoneId);
BinaryPipe transformed = newB.replaceChildren(newLeft, b.right());
assertEquals(transformed.left(), newLeft);
@ -103,37 +101,31 @@ public class BinaryDateTimePipeTests extends AbstractNodeTestCase<BinaryDateTime
}
@Override
protected BinaryDateTimePipe mutate(BinaryDateTimePipe instance) {
List<Function<BinaryDateTimePipe, BinaryDateTimePipe>> randoms = new ArrayList<>();
randoms.add(f -> new BinaryDateTimePipe(f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values())));
randoms.add(f -> new BinaryDateTimePipe(f.source(),
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values())));
randoms.add(f -> new BinaryDateTimePipe(f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone),
randomFrom(BinaryDateTimeProcessor.BinaryDateOperation.values())));
protected DateTruncPipe mutate(DateTruncPipe instance) {
List<Function<DateTruncPipe, DateTruncPipe>> randoms = new ArrayList<>();
randoms.add(f -> new DateTruncPipe(f.source(), f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
f.right(),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
randoms.add(f -> new DateTruncPipe(f.source(), f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
randoms.add(f -> new DateTruncPipe(f.source(), f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomDatetimeLiteral))),
randomValueOtherThan(f.zoneId(), ESTestCase::randomZone)));
return randomFrom(randoms).apply(instance);
}
@Override
protected BinaryDateTimePipe copy(BinaryDateTimePipe instance) {
return new BinaryDateTimePipe(instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.zoneId(),
instance.operation());
protected DateTruncPipe copy(DateTruncPipe instance) {
return new DateTruncPipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.zoneId());
}
}

View File

@ -29,7 +29,7 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase<
public static DateTruncProcessor randomDateTruncProcessor() {
return new DateTruncProcessor(
new ConstantProcessor(randomRealisticUnicodeOfLengthBetween(0, 128)),
new ConstantProcessor(ZonedDateTime.now()),
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
randomZone());
}
@ -52,13 +52,13 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase<
protected DateTruncProcessor mutateInstance(DateTruncProcessor instance) {
return new DateTruncProcessor(
new ConstantProcessor(ESTestCase.randomRealisticUnicodeOfLength(128)),
new ConstantProcessor(ZonedDateTime.now()),
new ConstantProcessor(DateTimeTestUtils.nowWithMillisResolution()),
randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone));
}
public void testInvalidInputs() {
SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class,
() -> new DateTrunc(Source.EMPTY, l(5), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertEquals("A string is required; received [5]", siae.getMessage());
siae = expectThrows(SqlIllegalArgumentException.class,
@ -68,13 +68,13 @@ public class DateTruncProcessorTests extends AbstractSqlWireSerializingTestCase<
siae = expectThrows(SqlIllegalArgumentException.class,
() -> new DateTrunc(Source.EMPTY, l("invalid"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertEquals("A value of [MILLENNIUM, CENTURY, DECADE, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, " +
"SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]",
"SECOND, MILLISECOND, MICROSECOND, NANOSECOND] or their aliases is required; received [invalid]",
siae.getMessage());
siae = expectThrows(SqlIllegalArgumentException.class,
() -> new DateTrunc(Source.EMPTY, l("dacede"), randomDatetimeLiteral(), randomZone()).makePipe().asProcessor().process(null));
assertEquals("Received value [dacede] is not valid date part for truncation; did you mean [decade, decades]?",
siae.getMessage());
}
public void testWithNulls() {