SQL: Fix issue with date columns returned always in UTC (#40163)
When selecting columns of ES type `date` (SQL's `DATETIME`), the `FieldHitExtractor` did not use the timezone of the client session but always fell back to UTC. The same UTC-only behaviour also affected grouping keys (`CompositeKeyExtractor`) and the First/Last functions on dates (`TopHitsAggExtractor`). Fixes: #40152
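The gist of the change: the extractors are now constructed with the session's `ZoneId` (taken from the connection configuration) and use it when converting the epoch-millis values returned by Elasticsearch into `ZonedDateTime`s. Below is a minimal, self-contained sketch of that conversion; it is illustrative only and not the actual extractor code (`asDateTimeUtc`/`asDateTime` are hypothetical helpers):

```java
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class SessionTimezoneSketch {

    // Roughly what the extractors did before the fix: the zone was hard-coded to UTC.
    static ZonedDateTime asDateTimeUtc(long millis) {
        return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of("Z"));
    }

    // Roughly what they do after the fix: the ZoneId of the client session is passed in.
    static ZonedDateTime asDateTime(long millis, ZoneId zoneId) {
        return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId);
    }

    public static void main(String[] args) {
        long millis = 1526467911780L;
        System.out.println(asDateTimeUtc(millis));                          // 2018-05-16T10:51:51.780Z
        System.out.println(asDateTime(millis, ZoneId.of("Europe/Berlin"))); // 2018-05-16T12:51:51.780+02:00[Europe/Berlin]
    }
}
```

Same instant, different wall-clock representation; after the fix a client with a non-UTC session timezone sees the second form.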
This commit is contained in:
parent 95f1c8bea0
commit bc4c8e53c5
@@ -137,7 +137,7 @@ public abstract class JdbcTestUtils {
         logger.info("\n" + formatter.formatWithHeader(cols, data));
     }
 
-    public static String of(long millis) {
-        return StringUtils.toString(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC));
+    public static String of(long millis, String zoneId) {
+        return StringUtils.toString(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(zoneId)));
     }
     }
 }
@@ -15,6 +15,7 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.xpack.sql.jdbc.EsType;
+import org.junit.Before;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -78,7 +79,14 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
         dateTimeTestingFields.put(new Tuple<String, Object>("test_float", 1f), EsType.FLOAT);
         dateTimeTestingFields.put(new Tuple<String, Object>("test_keyword", "true"), EsType.KEYWORD);
     }
 
+    private String timeZoneId;
+
+    @Before
+    public void chooseRandomTimeZone() {
+        this.timeZoneId = randomKnownTimeZone();
+    }
+
     public void testMultiValueFieldWithMultiValueLeniencyEnabled() throws Exception {
         createTestDataForMultiValueTests();
 
@@ -104,7 +112,6 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
         expected = expectThrows(SQLException.class,
                 () -> doWithQuery(() -> esJdbc(), "SELECT int, keyword FROM test", (results) -> {
                 }));
-
     }
 
     // Byte values testing
@@ -228,10 +235,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                 sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getByte("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", asDateString(randomDate)),
                sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", asDateString(randomDate)),
                sqle.getMessage());
        });
    }
@@ -351,10 +358,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getShort("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", asDateString(randomDate)),
                sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Short.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", asDateString(randomDate)),
                sqle.getMessage());
        });
    }
@@ -466,11 +473,11 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getInt("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
-               sqle.getMessage());
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]",
+               asDateString(randomDate)), sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Integer.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
-               sqle.getMessage());
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]",
+               asDateString(randomDate)), sqle.getMessage());
        });
    }
 
@@ -568,10 +575,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getLong("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", asDateString(randomDate)),
                sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Long.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", asDateString(randomDate)),
                sqle.getMessage());
        });
    }
@@ -651,10 +658,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getDouble("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", asDateString(randomDate)),
                sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Double.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", asDateString(randomDate)),
                sqle.getMessage());
        });
    }
@@ -734,10 +741,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getFloat("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", asDateString(randomDate)),
                sqle.getMessage());
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Float.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", asDateString(randomDate)),
                sqle.getMessage());
        });
    }
@@ -795,8 +802,8 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getObject(fld, Boolean.class));
            }
            SQLException sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate1)),
-               sqle.getMessage());
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]",
+               asDateString(randomDate1)), sqle.getMessage());
 
            results.next();
            assertEquals(false, results.getBoolean("test_boolean"));
@@ -805,12 +812,12 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
                assertEquals("Expected: <false> but was: <true> for field " + fld, false, results.getObject(fld, Boolean.class));
            }
            sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
-               sqle.getMessage());
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]",
+               asDateString(randomDate2)), sqle.getMessage());
 
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Boolean.class));
-           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
-               sqle.getMessage());
+           assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]",
+               asDateString(randomDate2)), sqle.getMessage());
 
            results.next();
            for(String fld : fieldsNames.stream()
@@ -835,23 +842,22 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
        Long randomLongDate = randomNonNegativeLong();
        indexSimpleDocumentWithTrueValues(randomLongDate);
 
-       String timeZoneId = randomKnownTimeZone();
        Calendar connCalendar = Calendar.getInstance(TimeZone.getTimeZone(timeZoneId), Locale.ROOT);
 
-       doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+       doWithQuery(SELECT_ALL_FIELDS, (results) -> {
            results.next();
            connCalendar.setTimeInMillis(randomLongDate);
            connCalendar.set(HOUR_OF_DAY, 0);
            connCalendar.set(MINUTE, 0);
            connCalendar.set(SECOND, 0);
            connCalendar.set(MILLISECOND, 0);
 
-           assertEquals(results.getDate("test_date"), new java.sql.Date(connCalendar.getTimeInMillis()));
-           assertEquals(results.getDate(9), new java.sql.Date(connCalendar.getTimeInMillis()));
-           assertEquals(results.getObject("test_date", java.sql.Date.class),
-               new java.sql.Date(randomLongDate - (randomLongDate % 86400000L)));
-           assertEquals(results.getObject(9, java.sql.Date.class),
-               new java.sql.Date(randomLongDate - (randomLongDate % 86400000L)));
+           java.sql.Date expectedDate = new java.sql.Date(connCalendar.getTimeInMillis());
+
+           assertEquals(expectedDate, results.getDate("test_date"));
+           assertEquals(expectedDate, results.getDate(9));
+           assertEquals(expectedDate, results.getObject("test_date", java.sql.Date.class));
+           assertEquals(expectedDate, results.getObject(9, java.sql.Date.class));
 
            // bulk validation for all fields which are not of type date
            validateErrorsForDateTimeTestsWithoutCalendar(results::getDate);
@@ -871,11 +877,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
            builder.timeField("test_date", null);
        });
 
-       String timeZoneId = randomKnownTimeZone();
        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
 
-       doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+       doWithQuery(SELECT_ALL_FIELDS, (results) -> {
            results.next();
            c.setTimeInMillis(randomLongDate);
            c.set(HOUR_OF_DAY, 0);
@@ -904,10 +909,9 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
        Long randomLongDate = randomNonNegativeLong();
        indexSimpleDocumentWithTrueValues(randomLongDate);
 
-       String timeZoneId = randomKnownTimeZone();
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(timeZoneId), Locale.ROOT);
 
-       doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+       doWithQuery(SELECT_ALL_FIELDS, (results) -> {
            results.next();
            c.setTimeInMillis(randomLongDate);
            c.set(ERA, GregorianCalendar.AD);
@@ -939,11 +943,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
            builder.timeField("test_date", null);
        });
 
-       String timeZoneId = randomKnownTimeZone();
        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
 
-       doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+       doWithQuery(SELECT_ALL_FIELDS, (results) -> {
            results.next();
            c.setTimeInMillis(randomLongDate);
            c.set(ERA, GregorianCalendar.AD);
@@ -1016,11 +1019,10 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
            builder.timeField("test_date", null);
        });
 
-       String timeZoneId = randomKnownTimeZone();
        String anotherTZId = randomValueOtherThan(timeZoneId, () -> randomKnownTimeZone());
        Calendar c = Calendar.getInstance(TimeZone.getTimeZone(anotherTZId), Locale.ROOT);
 
-       doWithQueryAndTimezone(SELECT_ALL_FIELDS, timeZoneId, (results) -> {
+       doWithQuery(SELECT_ALL_FIELDS, (results) -> {
            results.next();
            c.setTimeInMillis(randomLongDate);
 
@@ -1054,10 +1056,11 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
            connCalendar1.set(SECOND, 0);
            connCalendar1.set(MILLISECOND, 0);
 
-           assertEquals(new java.sql.Date(connCalendar1.getTimeInMillis()), results.getDate("test_date"));
-           assertEquals(new java.sql.Date(connCalendar1.getTimeInMillis()), results.getDate(1));
-           assertEquals(new java.sql.Date(dateInMillis - (dateInMillis % 86400000L)), results.getObject("test_date", java.sql.Date.class));
-           assertEquals(new java.sql.Date(dateInMillis - (dateInMillis % 86400000L)), results.getObject(1, java.sql.Date.class));
+           java.sql.Date expectedDate = new java.sql.Date(connCalendar1.getTimeInMillis());
+           assertEquals(expectedDate, results.getDate("test_date"));
+           assertEquals(expectedDate, results.getDate(1));
+           assertEquals(expectedDate, results.getObject("test_date", java.sql.Date.class));
+           assertEquals(expectedDate, results.getObject(1, java.sql.Date.class));
 
            // +1 day
            assertEquals(13, results.getInt("day"));
@@ -1082,12 +1085,11 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
            connCalendar2.set(SECOND, 0);
            connCalendar2.set(MILLISECOND, 0);
 
-           assertEquals(new java.sql.Date(connCalendar2.getTimeInMillis()), results.getDate("test_date"));
-           assertEquals(new java.sql.Date(connCalendar2.getTimeInMillis()), results.getDate(1));
-           assertEquals(new java.sql.Date(dateInMillis2 - (dateInMillis2 % 86400000L)),
-               results.getObject("test_date", java.sql.Date.class));
-           assertEquals(new java.sql.Date(dateInMillis2 - (dateInMillis2 % 86400000L)),
-               results.getObject(1, java.sql.Date.class));
+           java.sql.Date expectedDate = new java.sql.Date(connCalendar2.getTimeInMillis());
+           assertEquals(expectedDate, results.getDate("test_date"));
+           assertEquals(expectedDate, results.getDate(1));
+           assertEquals(expectedDate, results.getObject("test_date", java.sql.Date.class));
+           assertEquals(expectedDate, results.getObject(1, java.sql.Date.class));
 
            // -1 day
            assertEquals(11, results.getInt("day"));
@@ -1330,7 +1332,7 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
    }
 
    private void doWithQuery(String query, CheckedConsumer<ResultSet, SQLException> consumer) throws SQLException {
-       doWithQuery(() -> esJdbc(), query, consumer);
+       doWithQuery(() -> esJdbc(timeZoneId), query, consumer);
    }
 
    private void doWithQueryAndTimezone(String query, String tz, CheckedConsumer<ResultSet, SQLException> consumer) throws SQLException {
@@ -1638,4 +1640,8 @@ public class ResultSetTestCase extends JdbcIntegrationTestCase {
        assertNotNull("The leniency should be specified", connectionProperties.getProperty(property));
        return connection;
    }
+
+   private String asDateString(long millis) {
+       return of(millis, timeZoneId);
+   }
 }
@@ -365,7 +365,6 @@ public class Querier {
            super(listener, client, cfg, output, query, request);
        }
 
-
        @Override
        protected void handleResponse(SearchResponse response, ActionListener<SchemaRowSet> listener) {
            // there are some results
@@ -428,7 +427,7 @@ public class Querier {
        private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor totalCount) {
            if (ref instanceof GroupByRef) {
                GroupByRef r = (GroupByRef) ref;
-               return new CompositeKeyExtractor(r.key(), r.property(), r.zoneId());
+               return new CompositeKeyExtractor(r.key(), r.property(), cfg.zoneId(), r.isDateTimeBased());
            }
 
            if (ref instanceof MetricAggRef) {
@@ -438,7 +437,7 @@ public class Querier {
 
            if (ref instanceof TopHitsAggRef) {
                TopHitsAggRef r = (TopHitsAggRef) ref;
-               return new TopHitsAggExtractor(r.name(), r.fieldDataType());
+               return new TopHitsAggExtractor(r.name(), r.fieldDataType(), cfg.zoneId());
            }
 
            if (ref == GlobalCountRef.INSTANCE) {
@@ -518,12 +517,13 @@ public class Querier {
        private HitExtractor createExtractor(FieldExtraction ref) {
            if (ref instanceof SearchHitFieldRef) {
                SearchHitFieldRef f = (SearchHitFieldRef) ref;
-               return new FieldHitExtractor(f.name(), f.getDataType(), f.useDocValue(), f.hitName(), multiValueFieldLeniency);
+               return new FieldHitExtractor(f.name(), f.getDataType(), cfg.zoneId(),
+                       f.useDocValue(), f.hitName(), multiValueFieldLeniency);
            }
 
            if (ref instanceof ScriptFieldRef) {
                ScriptFieldRef f = (ScriptFieldRef) ref;
-               return new FieldHitExtractor(f.name(), null, true, multiValueFieldLeniency);
+               return new FieldHitExtractor(f.name(), null, cfg.zoneId(), true, multiValueFieldLeniency);
            }
 
            if (ref instanceof ComputedRef) {
@@ -20,44 +20,38 @@ import java.util.Objects;
 public class CompositeKeyExtractor implements BucketExtractor {
 
    /**
-    * Key or Komposite extractor.
+    * Key or Composite extractor.
    */
   static final String NAME = "k";
 
   private final String key;
   private final Property property;
   private final ZoneId zoneId;
+  private final boolean isDateTimeBased;
 
   /**
    * Constructs a new <code>CompositeKeyExtractor</code> instance.
-   * The time-zone parameter is used to indicate a date key.
    */
-  public CompositeKeyExtractor(String key, Property property, ZoneId zoneId) {
+  public CompositeKeyExtractor(String key, Property property, ZoneId zoneId, boolean isDateTimeBased) {
      this.key = key;
      this.property = property;
      this.zoneId = zoneId;
+     this.isDateTimeBased = isDateTimeBased;
   }
 
   CompositeKeyExtractor(StreamInput in) throws IOException {
      key = in.readString();
      property = in.readEnum(Property.class);
-     if (in.readBoolean()) {
-        zoneId = ZoneId.of(in.readString());
-     } else {
-        zoneId = null;
-     }
+     zoneId = ZoneId.of(in.readString());
+     isDateTimeBased = in.readBoolean();
   }
 
   @Override
   public void writeTo(StreamOutput out) throws IOException {
      out.writeString(key);
      out.writeEnum(property);
-     if (zoneId == null) {
-        out.writeBoolean(false);
-     } else {
-        out.writeBoolean(true);
-        out.writeString(zoneId.getId());
-     }
+     out.writeString(zoneId.getId());
+     out.writeBoolean(isDateTimeBased);
   }
 
   String key() {
@@ -72,6 +66,10 @@ public class CompositeKeyExtractor implements BucketExtractor {
      return zoneId;
   }
 
+  public boolean isDateTimeBased() {
+     return isDateTimeBased;
+  }
+
   @Override
   public String getWriteableName() {
      return NAME;
@@ -91,7 +89,7 @@ public class CompositeKeyExtractor implements BucketExtractor {
 
      Object object = ((Map<?, ?>) m).get(key);
 
-     if (zoneId != null) {
+     if (isDateTimeBased) {
        if (object == null) {
           return object;
        } else if (object instanceof Long) {
@@ -106,7 +104,7 @@ public class CompositeKeyExtractor implements BucketExtractor {
 
   @Override
   public int hashCode() {
-     return Objects.hash(key, property, zoneId);
+     return Objects.hash(key, property, zoneId, isDateTimeBased);
   }
 
   @Override
@@ -122,7 +120,8 @@ public class CompositeKeyExtractor implements BucketExtractor {
      CompositeKeyExtractor other = (CompositeKeyExtractor) obj;
      return Objects.equals(key, other.key)
           && Objects.equals(property, other.property)
-          && Objects.equals(zoneId, other.zoneId);
+          && Objects.equals(zoneId, other.zoneId)
+          && Objects.equals(isDateTimeBased, other.isDateTimeBased);
   }
 
   @Override
@@ -16,6 +16,7 @@ import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
 import java.io.IOException;
+import java.time.ZoneId;
 import java.util.ArrayDeque;
 import java.util.Deque;
 import java.util.List;
@@ -45,21 +46,23 @@ public class FieldHitExtractor implements HitExtractor {
 
   private final String fieldName, hitName;
   private final DataType dataType;
+  private final ZoneId zoneId;
   private final boolean useDocValue;
   private final boolean arrayLeniency;
   private final String[] path;
 
-  public FieldHitExtractor(String name, DataType dataType, boolean useDocValue) {
-     this(name, dataType, useDocValue, null, false);
+  public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue) {
+     this(name, dataType, zoneId, useDocValue, null, false);
   }
 
-  public FieldHitExtractor(String name, DataType dataType, boolean useDocValue, boolean arrayLeniency) {
-     this(name, dataType, useDocValue, null, arrayLeniency);
+  public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue, boolean arrayLeniency) {
+     this(name, dataType, zoneId, useDocValue, null, arrayLeniency);
   }
 
-  public FieldHitExtractor(String name, DataType dataType, boolean useDocValue, String hitName, boolean arrayLeniency) {
+  public FieldHitExtractor(String name, DataType dataType, ZoneId zoneId, boolean useDocValue, String hitName, boolean arrayLeniency) {
     this.fieldName = name;
     this.dataType = dataType;
+    this.zoneId = zoneId;
     this.useDocValue = useDocValue;
     this.arrayLeniency = arrayLeniency;
     this.hitName = hitName;
@@ -77,6 +80,7 @@ public class FieldHitExtractor implements HitExtractor {
     fieldName = in.readString();
     String esType = in.readOptionalString();
     dataType = esType != null ? DataType.fromTypeName(esType) : null;
+    zoneId = ZoneId.of(in.readString());
     useDocValue = in.readBoolean();
     hitName = in.readOptionalString();
     arrayLeniency = in.readBoolean();
@@ -92,6 +96,7 @@ public class FieldHitExtractor implements HitExtractor {
  public void writeTo(StreamOutput out) throws IOException {
     out.writeString(fieldName);
     out.writeOptionalString(dataType == null ? null : dataType.typeName);
+    out.writeString(zoneId.getId());
     out.writeBoolean(useDocValue);
     out.writeOptionalString(hitName);
     out.writeBoolean(arrayLeniency);
@@ -135,7 +140,7 @@ public class FieldHitExtractor implements HitExtractor {
     }
     if (dataType == DataType.DATETIME) {
        if (values instanceof String) {
-          return DateUtils.asDateTime(Long.parseLong(values.toString()));
+          return DateUtils.asDateTime(Long.parseLong(values.toString()), zoneId);
        }
     }
     if (values instanceof Long || values instanceof Double || values instanceof String || values instanceof Boolean) {
@@ -215,9 +220,17 @@ public class FieldHitExtractor implements HitExtractor {
     return fieldName;
  }
 
+ public ZoneId zoneId() {
+    return zoneId;
+ }
+
+ DataType dataType() {
+    return dataType;
+ }
+
  @Override
  public String toString() {
-    return fieldName + "@" + hitName;
+    return fieldName + "@" + hitName + "@" + zoneId;
  }
 
  @Override
@@ -14,6 +14,7 @@ import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
 import java.io.IOException;
+import java.time.ZoneId;
 import java.util.Objects;
 
 public class TopHitsAggExtractor implements BucketExtractor {
@@ -22,27 +23,39 @@ public class TopHitsAggExtractor implements BucketExtractor {
 
  private final String name;
  private final DataType fieldDataType;
+ private final ZoneId zoneId;
 
- public TopHitsAggExtractor(String name, DataType fieldDataType) {
+ public TopHitsAggExtractor(String name, DataType fieldDataType, ZoneId zoneId) {
    this.name = name;
    this.fieldDataType = fieldDataType;
+   this.zoneId = zoneId;
  }
 
  TopHitsAggExtractor(StreamInput in) throws IOException {
    name = in.readString();
    fieldDataType = in.readEnum(DataType.class);
+   zoneId = ZoneId.of(in.readString());
  }
 
  @Override
 public void writeTo(StreamOutput out) throws IOException {
    out.writeString(name);
    out.writeEnum(fieldDataType);
+   out.writeString(zoneId.getId());
 }
 
 String name() {
    return name;
 }
 
 DataType fieldDataType() {
    return fieldDataType;
 }
 
+ ZoneId zoneId() {
+   return zoneId;
+ }
+
 @Override
 public String getWriteableName() {
    return NAME;
@@ -61,7 +74,7 @@ public class TopHitsAggExtractor implements BucketExtractor {
 
    Object value = agg.getHits().getAt(0).getFields().values().iterator().next().getValue();
    if (fieldDataType.isDateBased()) {
-      return DateUtils.asDateTime(Long.parseLong(value.toString()));
+      return DateUtils.asDateTime(Long.parseLong(value.toString()), zoneId);
    } else {
       return value;
    }
@@ -69,7 +82,7 @@ public class TopHitsAggExtractor implements BucketExtractor {
 
 @Override
 public int hashCode() {
-   return Objects.hash(name, fieldDataType);
+   return Objects.hash(name, fieldDataType, zoneId);
 }
 
 @Override
@@ -84,11 +97,12 @@ public class TopHitsAggExtractor implements BucketExtractor {
 
    TopHitsAggExtractor other = (TopHitsAggExtractor) obj;
    return Objects.equals(name, other.name)
-        && Objects.equals(fieldDataType, other.fieldDataType);
+        && Objects.equals(fieldDataType, other.fieldDataType)
+        && Objects.equals(zoneId, other.zoneId);
 }
 
 @Override
 public String toString() {
-   return "TopHits>" + name + "[" + fieldDataType + "]";
+   return "TopHits>" + name + "[" + fieldDataType + "]@" + zoneId;
 }
 }
@@ -5,14 +5,14 @@
  */
 package org.elasticsearch.xpack.sql.planner;
 
-import java.util.List;
-import java.util.Map;
-
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.sql.planner.Verifier.Failure;
 import org.elasticsearch.xpack.sql.tree.Node;
 
+import java.util.List;
+import java.util.Map;
+
 import static java.util.stream.Collectors.toMap;
 
 public class Planner {
@@ -64,4 +64,4 @@ public class Planner {
    List<Failure> failures = Verifier.verifyExecutingPlan(plan);
    return failures.stream().collect(toMap(Failure::source, Failure::message));
 }
-}
+}
@@ -27,7 +27,6 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.TopHits;
 import org.elasticsearch.xpack.sql.expression.function.grouping.GroupingFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
-import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.AggPathInput;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
@@ -64,9 +63,7 @@ import org.elasticsearch.xpack.sql.rule.Rule;
 import org.elasticsearch.xpack.sql.rule.RuleExecutor;
 import org.elasticsearch.xpack.sql.session.EmptyExecutable;
 import org.elasticsearch.xpack.sql.util.Check;
-import org.elasticsearch.xpack.sql.util.DateUtils;
 
-import java.time.ZoneId;
 import java.util.Arrays;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -292,7 +289,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
       if (matchingGroup != null) {
          if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) {
             Processor action = null;
-            ZoneId zi = exp.dataType().isDateBased() ? DateUtils.UTC : null;
+            boolean isDateBased = exp.dataType().isDateBased();
             /*
              * special handling of dates since aggs return the typed Date object which needs
             * extraction instead of handling this in the scroller, the folder handles this
@@ -300,9 +297,10 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
             */
            if (exp instanceof DateTimeHistogramFunction) {
               action = ((UnaryPipe) p).action();
-              zi = ((DateTimeFunction) exp).zoneId();
+              isDateBased = true;
            }
-           return new AggPathInput(exp.source(), exp, new GroupByRef(matchingGroup.id(), null, zi), action);
+           return new AggPathInput(exp.source(), exp,
+               new GroupByRef(matchingGroup.id(), null, isDateBased), action);
         }
      }
      // or found an aggregate expression (which has to work on an attribute used for grouping)
@@ -340,15 +338,12 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
      // attributes can only refer to declared groups
      if (child instanceof Attribute) {
         Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(child));
-        // check if the field is a date - if so mark it as such to interpret the long as a date
-        // UTC is used since that's what the server uses and there's no conversion applied
-        // (like for date histograms)
-        ZoneId zi = child.dataType().isDateBased() ? DateUtils.UTC : null;
-        queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi), ((Attribute) child));
+        queryC = queryC.addColumn(
+            new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()), ((Attribute) child));
      }
      // handle histogram
      else if (child instanceof GroupingFunction) {
-        queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, null),
+        queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, child.dataType().isDateBased()),
            ((GroupingFunction) child).toAttribute());
      }
      // fallback to regular agg functions
@@ -369,8 +364,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
      matchingGroup = groupingContext.groupFor(ne);
      Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne));
 
-     ZoneId zi = ne.dataType().isDateBased() ? DateUtils.UTC : null;
-     queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi), ne.toAttribute());
+     queryC = queryC.addColumn(
+         new GroupByRef(matchingGroup.id(), null, ne.dataType().isDateBased()), ne.toAttribute());
   }
  }
 }
@@ -400,7 +395,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
   // if the count points to the total track hits, enable accurate count retrieval
   queryC = queryC.withTrackHits();
  } else {
-  ref = new GroupByRef(groupingAgg.id(), Property.COUNT, null);
+  ref = new GroupByRef(groupingAgg.id(), Property.COUNT, false);
  }
 
 Map<String, GroupByKey> pseudoFunctions = new LinkedHashMap<>(queryC.pseudoFunctions());
@@ -7,8 +7,6 @@ package org.elasticsearch.xpack.sql.querydsl.container;
 
 import org.elasticsearch.xpack.sql.execution.search.AggRef;
 
-import java.time.ZoneId;
-
 /**
  * Reference to a GROUP BY agg (typically this gets translated to a composite key).
  */
@@ -20,12 +18,12 @@ public class GroupByRef extends AggRef {
 
  private final String key;
  private final Property property;
- private final ZoneId zoneId;
+ private final boolean isDateTimeBased;
 
- public GroupByRef(String key, Property property, ZoneId zoneId) {
+ public GroupByRef(String key, Property property, boolean isDateTimeBased) {
    this.key = key;
    this.property = property == null ? Property.VALUE : property;
-   this.zoneId = zoneId;
+   this.isDateTimeBased = isDateTimeBased;
 }
 
 public String key() {
@@ -36,8 +34,8 @@ public class GroupByRef extends AggRef {
    return property;
 }
 
- public ZoneId zoneId() {
-   return zoneId;
+ public boolean isDateTimeBased() {
+   return isDateTimeBased;
 }
 
 @Override
@@ -76,7 +76,7 @@ public final class Cursors {
      }
      return os.toString(StandardCharsets.UTF_8.name());
   } catch (Exception ex) {
-     throw new SqlIllegalArgumentException("Unexpected failure retriving next page", ex);
+     throw new SqlIllegalArgumentException("Unexpected failure retrieving next page", ex);
   }
 }
 
@@ -9,22 +9,23 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
-import java.io.IOException;
 import java.time.ZoneId;
 
 import static java.util.Arrays.asList;
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonMap;
+import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
 
 public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase<CompositeKeyExtractor> {
 
  public static CompositeKeyExtractor randomCompositeKeyExtractor() {
-   return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeZone());
+   return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeZone(), randomBoolean());
 }
 
 @Override
@@ -38,19 +39,23 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase<
 }
 
 @Override
- protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) throws IOException {
-   return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.zoneId());
+ protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) {
+   return new CompositeKeyExtractor(
+     instance.key() + "mutated",
+     randomValueOtherThan(instance.property(), () -> randomFrom(Property.values())),
+     randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone),
+     !instance.isDateTimeBased());
 }
 
 public void testExtractBucketCount() {
   Bucket bucket = new TestBucket(emptyMap(), randomLong(), new Aggregations(emptyList()));
   CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.COUNT,
-     randomZone());
+     randomZone(), false);
   assertEquals(bucket.getDocCount(), extractor.extract(bucket));
 }
 
 public void testExtractKey() {
-  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, null);
+  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, UTC, false);
 
   Object value = new Object();
   Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList()));
@@ -58,7 +63,7 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase<
 }
 
 public void testExtractDate() {
-  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeZone());
+  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeZone(), true);
 
   long millis = System.currentTimeMillis();
   Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList()));
@@ -66,7 +71,7 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase<
 }
 
 public void testExtractIncorrectDateKey() {
-  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone());
+  CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone(), true);
 
   Object value = new Object();
   Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList()));
@@ -29,6 +29,7 @@ import java.util.function.Supplier;
 import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;
+import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
 
 public class ComputingExtractorTests extends AbstractWireSerializingTestCase<ComputingExtractor> {
  public static ComputingExtractor randomComputingExtractor() {
@@ -70,7 +71,7 @@ public class ComputingExtractorTests extends AbstractWireSerializingTestCase<Com
 public void testGet() {
   String fieldName = randomAlphaOfLength(5);
   ChainingProcessor extractor = new ChainingProcessor(
-      new HitExtractorProcessor(new FieldHitExtractor(fieldName, null, true)),
+      new HitExtractorProcessor(new FieldHitExtractor(fieldName, null, UTC, true, false)),
      new MathProcessor(MathOperation.LOG));
 
   int times = between(1, 1000);
@@ -18,6 +18,7 @@ import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
 import java.io.IOException;
+import java.time.ZoneId;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -30,13 +31,15 @@ import java.util.function.Supplier;
 import static java.util.Arrays.asList;
 import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
+import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
 import static org.hamcrest.Matchers.is;
 
 public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
 
 public static FieldHitExtractor randomFieldHitExtractor() {
   String hitName = randomAlphaOfLength(5);
   String name = randomAlphaOfLength(5) + "." + hitName;
-  return new FieldHitExtractor(name, null, randomBoolean(), hitName, false);
+  return new FieldHitExtractor(name, null, randomZone(), randomBoolean(), hitName, false);
 }
 
 @Override
@@ -51,7 +54,13 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 
 @Override
 protected FieldHitExtractor mutateInstance(FieldHitExtractor instance) {
-  return new FieldHitExtractor(instance.fieldName() + "mutated", null, true, instance.hitName(), false);
+  return new FieldHitExtractor(
+    instance.fieldName() + "mutated",
+    randomValueOtherThan(instance.dataType(), () -> randomFrom(DataType.values())),
+    randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone),
+    randomBoolean(),
+    instance.hitName() + "mutated",
+    randomBoolean());
 }
 
 public void testGetDottedValueWithDocValues() {
@@ -60,7 +69,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
   String child = randomAlphaOfLength(5);
   String fieldName = grandparent + "." + parent + "." + child;
 
-  FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, true);
+  FieldHitExtractor extractor = getFieldHitExtractor(fieldName, true);
 
   int times = between(1, 1000);
   for (int i = 0; i < times; i++) {
@@ -84,7 +93,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
   String child = randomAlphaOfLength(5);
   String fieldName = grandparent + "." + parent + "." + child;
 
-  FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, false);
+  FieldHitExtractor extractor = getFieldHitExtractor(fieldName, false);
 
   int times = between(1, 1000);
   for (int i = 0; i < times; i++) {
@@ -123,7 +132,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 
 public void testGetDocValue() {
   String fieldName = randomAlphaOfLength(5);
-  FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, true);
+  FieldHitExtractor extractor = getFieldHitExtractor(fieldName, true);
 
   int times = between(1, 1000);
   for (int i = 0; i < times; i++) {
@@ -140,18 +149,19 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testGetDate() {
+  ZoneId zoneId = randomZone();
   long millis = 1526467911780L;
   List<Object> documentFieldValues = Collections.singletonList(Long.toString(millis));
   SearchHit hit = new SearchHit(1);
   DocumentField field = new DocumentField("my_date_field", documentFieldValues);
   hit.fields(singletonMap("my_date_field", field));
-  FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, true);
-  assertEquals(DateUtils.asDateTime(millis), extractor.extract(hit));
+  FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, zoneId, true);
+  assertEquals(DateUtils.asDateTime(millis, zoneId), extractor.extract(hit));
 }
 
 public void testGetSource() throws IOException {
   String fieldName = randomAlphaOfLength(5);
-  FieldHitExtractor extractor = new FieldHitExtractor(fieldName, null, false);
+  FieldHitExtractor extractor = getFieldHitExtractor(fieldName, false);
 
   int times = between(1, 1000);
   for (int i = 0; i < times; i++) {
@@ -174,12 +184,13 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testToString() {
-  assertEquals("hit.field@hit", new FieldHitExtractor("hit.field", null, true, "hit", false).toString());
+  assertEquals("hit.field@hit@Europe/Berlin",
+    new FieldHitExtractor("hit.field", null, ZoneId.of("Europe/Berlin"), true, "hit", false).toString());
 }
 
 public void testMultiValuedDocValue() {
   String fieldName = randomAlphaOfLength(5);
-  FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, true);
+  FieldHitExtractor fe = getFieldHitExtractor(fieldName, true);
   SearchHit hit = new SearchHit(1);
   DocumentField field = new DocumentField(fieldName, asList("a", "b"));
   hit.fields(singletonMap(fieldName, field));
|
|||
|
||||
public void testMultiValuedSourceValue() throws IOException {
|
||||
String fieldName = randomAlphaOfLength(5);
|
||||
FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor(fieldName, false);
|
||||
SearchHit hit = new SearchHit(1);
|
||||
XContentBuilder source = JsonXContent.contentBuilder();
|
||||
source.startObject(); {
|
||||
|
@ -204,7 +215,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
|
|||
|
||||
public void testSingleValueArrayInSource() throws IOException {
|
||||
String fieldName = randomAlphaOfLength(5);
|
||||
FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor(fieldName, false);
|
||||
SearchHit hit = new SearchHit(1);
|
||||
XContentBuilder source = JsonXContent.contentBuilder();
|
||||
Object value = randomValue();
|
||||
|
@ -218,14 +229,14 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
|
|||
}
|
||||
|
||||
public void testExtractSourcePath() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
|
||||
assertThat(fe.extractFromSource(map), is(value));
|
||||
}
|
||||
|
||||
public void testExtractSourceIncorrectPath() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d", false);
|
||||
Object value = randomNonNullValue();
|
||||
Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
|
||||
SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map));
|
||||
|
@ -233,7 +244,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
|
|||
}
|
||||
|
||||
public void testMultiValuedSource() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", asList(value, value));
|
||||
SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map));
|
||||
|
@ -241,7 +252,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
|
|||
}
|
||||
|
||||
public void testMultiValuedSourceAllowed() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a", null, false, true);
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a", null, UTC, false, true);
|
||||
Object valueA = randomValue();
|
||||
Object valueB = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", asList(valueA, valueB));
|
||||
|
@ -249,28 +260,28 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
|
|||
}
|
||||
|
||||
public void testFieldWithDots() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a.b", value);
|
||||
assertEquals(value, fe.extractFromSource(map));
|
||||
}
|
||||
|
||||
public void testNestedFieldWithDots() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", singletonMap("b.c", value));
|
||||
assertEquals(value, fe.extractFromSource(map));
|
||||
}
|
||||
|
||||
public void testNestedFieldWithDotsWithNestedField() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", singletonMap("b.c", singletonMap("d", value)));
|
||||
assertEquals(value, fe.extractFromSource(map));
|
||||
}
|
||||
|
||||
public void testNestedFieldWithDotsWithNestedFieldWithDots() {
|
||||
FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d.e", null, false);
|
||||
FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e", false);
|
||||
Object value = randomValue();
|
||||
Map<String, Object> map = singletonMap("a", singletonMap("b.c", singletonMap("d.e", value)));
|
||||
assertEquals(value, fe.extractFromSource(map));
|
||||
|
@@ -284,7 +295,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
   path[i] = randomAlphaOfLength(randomIntBetween(1, 10));
   sj.add(path[i]);
 }
- FieldHitExtractor fe = new FieldHitExtractor(sj.toString(), null, false);
+ FieldHitExtractor fe = getFieldHitExtractor(sj.toString(), false);
 
 List<String> paths = new ArrayList<>(path.length);
 int start = 0;
@@ -337,7 +348,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testExtractSourceIncorrectPathWithFieldWithDots() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d.e", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = singletonMap("a", singletonMap("b.c", singletonMap("d", value)));
   SqlException ex = expectThrows(SqlException.class, () -> fe.extractFromSource(map));
@@ -345,8 +356,8 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldWithDotsAndCommonPrefix() {
-  FieldHitExtractor fe1 = new FieldHitExtractor("a.d", null, false);
-  FieldHitExtractor fe2 = new FieldHitExtractor("a.b.c", null, false);
+  FieldHitExtractor fe1 = getFieldHitExtractor("a.d", false);
+  FieldHitExtractor fe2 = getFieldHitExtractor("a.b.c", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   map.put("a", singletonMap("d", value));
@@ -356,8 +367,8 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldWithDotsAndCommonPrefixes() {
-  FieldHitExtractor fe1 = new FieldHitExtractor("a1.b.c.d1.e.f.g1", null, false);
-  FieldHitExtractor fe2 = new FieldHitExtractor("a2.b.c.d2.e.f.g2", null, false);
+  FieldHitExtractor fe1 = getFieldHitExtractor("a1.b.c.d1.e.f.g1", false);
+  FieldHitExtractor fe2 = getFieldHitExtractor("a2.b.c.d2.e.f.g2", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   map.put("a1", singletonMap("b.c", singletonMap("d1", singletonMap("e.f", singletonMap("g1", value)))));
@@ -367,7 +378,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldWithDotsAndSamePathButDifferentHierarchy() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d.e.f.g", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b.c.d.e.f.g", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   map.put("a.b", singletonMap("c", singletonMap("d.e", singletonMap("f.g", value))));
@@ -377,7 +388,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldsWithSingleValueArrayAsSubfield() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   // "a" : [{"b" : "value"}]
@@ -386,7 +397,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldsWithMultiValueArrayAsSubfield() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b", false);
   Map<String, Object> map = new HashMap<>();
   // "a" : [{"b" : "value1"}, {"b" : "value2"}]
   map.put("a", asList(singletonMap("b", randomNonNullValue()), singletonMap("b", randomNonNullValue())));
@@ -395,7 +406,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   // "a" : [{"b" : [{"c" : "value"}]}]
@@ -404,7 +415,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldsWithMultiValueArrayAsSubfield_ThreeNestedLists() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
   Map<String, Object> map = new HashMap<>();
   // "a" : [{"b" : [{"c" : ["value1", "value2"]}]}]
   map.put("a", singletonList(singletonMap("b", singletonList(singletonMap("c", asList("value1", "value2"))))));
@@ -413,7 +424,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 }
 
 public void testFieldsWithSingleValueArrayAsSubfield_TwoNestedLists2() {
-  FieldHitExtractor fe = new FieldHitExtractor("a.b.c", null, false);
+  FieldHitExtractor fe = getFieldHitExtractor("a.b.c", false);
   Object value = randomNonNullValue();
   Map<String, Object> map = new HashMap<>();
   // "a" : [{"b" : {"c" : ["value"]}]}]
@@ -423,7 +434,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
 
 public void testObjectsForSourceValue() throws IOException {
   String fieldName = randomAlphaOfLength(5);
-  FieldHitExtractor fe = new FieldHitExtractor(fieldName, null, false);
+  FieldHitExtractor fe = getFieldHitExtractor(fieldName, false);
   SearchHit hit = new SearchHit(1);
   XContentBuilder source = JsonXContent.contentBuilder();
   source.startObject(); {
@@ -439,6 +450,10 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<Fiel
   assertThat(ex.getMessage(), is("Objects (returned by [" + fieldName + "]) are not supported"));
 }
 
+ private FieldHitExtractor getFieldHitExtractor(String fieldName, boolean useDocValue) {
+   return new FieldHitExtractor(fieldName, null, UTC, useDocValue);
+ }
+
 private Object randomValue() {
   Supplier<Object> value = randomFrom(Arrays.asList(
     () -> randomAlphaOfLength(10),
@@ -15,20 +15,23 @@ import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
 import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlException;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.DateUtils;
 
+import java.time.ZoneId;
 import java.util.Collections;
 
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
+import static org.elasticsearch.xpack.sql.util.DateUtils.UTC;
 
 public class TopHitsAggExtractorTests extends AbstractWireSerializingTestCase<TopHitsAggExtractor> {
 
 public static TopHitsAggExtractor randomTopHitsAggExtractor() {
-  return new TopHitsAggExtractor(randomAlphaOfLength(16), randomFrom(DataType.values()));
+  return new TopHitsAggExtractor(randomAlphaOfLength(16), randomFrom(DataType.values()), randomZone());
 }
 
 @Override
@@ -43,7 +46,10 @@ public class TopHitsAggExtractorTests extends AbstractWireSerializingTestCase<To
 
 @Override
 protected TopHitsAggExtractor mutateInstance(TopHitsAggExtractor instance) {
-  return new TopHitsAggExtractor(instance.name() + "mutated", randomFrom(DataType.values()));
+  return new TopHitsAggExtractor(
+    instance.name() + "mutated",
+    randomValueOtherThan(instance.fieldDataType(), () -> randomFrom(DataType.values())),
+    randomValueOtherThan(instance.zoneId(), ESTestCase::randomZone));
 }
 
 public void testNoAggs() {
@@ -63,7 +69,7 @@ public class TopHitsAggExtractorTests extends AbstractWireSerializingTestCase<To
 }
 
 public void testExtractValue() {
-  TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataType.KEYWORD);
+  TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataType.KEYWORD, UTC);
 
   String value = "Str_Value";
   Aggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null, null);
@@ -72,12 +78,13 @@ public class TopHitsAggExtractorTests extends AbstractWireSerializingTestCase<To
 }
 
 public void testExtractDateValue() {
-  TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataType.DATETIME);
+  ZoneId zoneId = randomZone();
+  TopHitsAggExtractor extractor = new TopHitsAggExtractor("topHitsAgg", DataType.DATETIME, zoneId);
 
   long value = 123456789L;
   Aggregation agg = new InternalTopHits(extractor.name(), 0, 1, null, searchHitsOf(value), null, null);
   Bucket bucket = new TestBucket(emptyMap(), 0, new Aggregations(singletonList(agg)));
-  assertEquals(DateUtils.asDateTime(value), extractor.extract(bucket));
+  assertEquals(DateUtils.asDateTime(value, zoneId), extractor.extract(bucket));
 }
 
 private SearchHits searchHitsOf(Object value) {
@@ -243,7 +243,7 @@ public class SysTablesTests extends ESTestCase {
   IndexResolver resolver = mock(IndexResolver.class);
   when(resolver.clusterName()).thenReturn(CLUSTER_NAME);
 
-  SqlSession session = new SqlSession(null, null, null, resolver, null, null, null, null, null);
+  SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null, null);
   return new Tuple<>(cmd, session);
 }
 
@@ -36,11 +36,11 @@ public class SysTypesTests extends ESTestCase {
   Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), false);
 
   IndexResolver resolver = mock(IndexResolver.class);
-  SqlSession session = new SqlSession(null, null, null, resolver, null, null, null, null, null);
+  SqlSession session = new SqlSession(TestUtils.TEST_CFG, null, null, resolver, null, null, null, null, null);
   return new Tuple<>(cmd, session);
 }
 
- public void testSysTypes() throws Exception {
+ public void testSysTypes() {
   Command cmd = sql("SYS TYPES").v1();
 
   List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
@@ -69,7 +69,7 @@ public class SysTypesTests extends ESTestCase {
   }, ex -> fail(ex.getMessage())));
 }
 
- public void testSysTypesDefaultFiltering() throws Exception {
+ public void testSysTypesDefaultFiltering() {
   Command cmd = sql("SYS TYPES 0").v1();
 
   cmd.execute(null, wrap(r -> {
@@ -77,7 +77,7 @@ public class SysTypesTests extends ESTestCase {
   }, ex -> fail(ex.getMessage())));
 }
 
- public void testSysTypesPositiveFiltering() throws Exception {
+ public void testSysTypesPositiveFiltering() {
   // boolean = 16
   Command cmd = sql("SYS TYPES " + JDBCType.BOOLEAN.getVendorTypeNumber()).v1();
 
@@ -87,7 +87,7 @@ public class SysTypesTests extends ESTestCase {
   }, ex -> fail(ex.getMessage())));
 }
 
- public void testSysTypesNegativeFiltering() throws Exception {
+ public void testSysTypesNegativeFiltering() {
   Command cmd = sql("SYS TYPES " + JDBCType.TINYINT.getVendorTypeNumber()).v1();
 
   cmd.execute(null, wrap(r -> {
@@ -96,7 +96,7 @@ public class SysTypesTests extends ESTestCase {
   }, ex -> fail(ex.getMessage())));
 }
 
- public void testSysTypesMultipleMatches() throws Exception {
+ public void testSysTypesMultipleMatches() {
   Command cmd = sql("SYS TYPES " + JDBCType.VARCHAR.getVendorTypeNumber()).v1();
 
   cmd.execute(null, wrap(r -> {