SQL: Fix esType for DATETIME/DATE and INTERVALS (#38179)

Since the introduction of data types that don't have a corresponding type
in ES, `esType` is error-prone when used for `unmappedType()` calls.
Moreover, since the renaming of `DATE` to `DATETIME` and the introduction
of an actual date-only `DATE`, `esType` would return `datetime`, which
is not a valid type for an ES mapping.

Fixes: #38051
Marios Trivyzas 2019-02-05 23:12:52 +02:00 committed by GitHub
parent 1f4f6f35c8
commit 2c30501c74
22 changed files with 86 additions and 80 deletions
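
For context, a minimal sketch of what goes wrong when the enum-derived name is used as an unmapped type, and how the split fixes it. The helper class and field name below are hypothetical; `SortBuilders`/`FieldSortBuilder` are the stock Elasticsearch sort builders and `unmappedType()` is the call the commit message refers to.

import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.xpack.sql.type.DataType;

// Hypothetical illustration only, not code from this commit.
class UnmappedTypeSketch {
    static FieldSortBuilder sortOnDatetimeField(String fieldName) {
        // Before this change the enum name ("datetime") could end up as the
        // unmapped_type of a sort, which is not a valid ES mapping type.
        // After the esType/typeName split, DataType.DATETIME.esType is "date".
        return SortBuilders.fieldSort(fieldName)
                .unmappedType(DataType.DATETIME.esType);
    }
}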

View File

@ -219,7 +219,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
// compound fields
else if (allowCompound == false && fa.dataType().isPrimitive() == false) {
named = u.withUnresolvedMessage(
"Cannot use field [" + fa.name() + "] type [" + fa.dataType().esType + "] only its subfields");
"Cannot use field [" + fa.name() + "] type [" + fa.dataType().typeName + "] only its subfields");
}
}
return named;
@ -1228,4 +1228,4 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
return true;
}
}
}
}

View File

@ -682,7 +682,7 @@ public final class Verifier {
for (Expression value : in.list()) {
if (areTypesCompatible(dt, value.dataType()) == false) {
localFailures.add(fail(value, "expected data type [{}], value provided is of type [{}]",
dt.esType, value.dataType().esType));
dt.typeName, value.dataType().typeName));
return;
}
}
@ -703,7 +703,7 @@ public final class Verifier {
} else {
if (areTypesCompatible(dt, child.dataType()) == false) {
localFailures.add(fail(child, "expected data type [{}], value provided is of type [{}]",
dt.esType, child.dataType().esType));
dt.typeName, child.dataType().typeName));
return;
}
}

View File

@ -175,4 +175,4 @@ public abstract class SourceGenerator {
builder.storedFields(NO_STORED_FIELD);
}
}
}
}

View File

@ -87,7 +87,7 @@ public class FieldHitExtractor implements HitExtractor {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeOptionalString(dataType == null ? null : dataType.esType);
out.writeOptionalString(dataType == null ? null : dataType.typeName);
out.writeBoolean(useDocValue);
out.writeOptionalString(hitName);
}

View File

@ -191,7 +191,7 @@ public final class Expressions {
paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT),
acceptedTypesForErrorMsg(acceptedTypes),
Expressions.name(e),
e.dataType().esType));
e.dataType().typeName));
}
private static String acceptedTypesForErrorMsg(String... acceptedTypes) {

View File

@ -46,7 +46,7 @@ public abstract class TopHits extends AggregateFunction {
((FieldAttribute) field()).exactAttribute();
} catch (MappingException ex) {
return new TypeResolution(format(null, "[{}] cannot operate on first argument field of data type [{}]",
functionName(), field().dataType().esType));
functionName(), field().dataType().typeName));
}
if (orderField() != null) {
@ -59,7 +59,7 @@ public abstract class TopHits extends AggregateFunction {
((FieldAttribute) orderField()).exactAttribute();
} catch (MappingException ex) {
return new TypeResolution(format(null, "[{}] cannot operate on second argument field of data type [{}]",
functionName(), orderField().dataType().esType));
functionName(), orderField().dataType().typeName));
}
}
return TypeResolution.TYPE_RESOLVED;

View File

@ -7,8 +7,8 @@ package org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataTypes;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
@ -36,7 +36,7 @@ public class Sub extends DateTimeArithmeticOperation {
protected TypeResolution resolveWithIntervals() {
if (right().dataType().isDateBased() && DataTypes.isInterval(left().dataType())) {
return new TypeResolution(format(null, "Cannot subtract a {}[{}] from an interval[{}]; do you mean the reverse?",
right().dataType().esType, right().source().text(), left().source().text()));
right().dataType().typeName, right().source().text(), left().source().text()));
}
return TypeResolution.TYPE_RESOLVED;
}

View File

@ -81,7 +81,7 @@ public class ShowColumns extends Command {
String name = e.getKey();
if (dt != null) {
// show only fields that exist in ES
rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName(), dt.esType));
rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName(), dt.typeName));
if (field.getProperties().isEmpty() == false) {
String newPrefix = prefix != null ? prefix + "." + name : name;
fillInRows(field.getProperties(), newPrefix, rows);

View File

@ -24,7 +24,6 @@ import org.elasticsearch.xpack.sql.type.EsField;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
@ -143,7 +142,7 @@ public class SysColumns extends Command {
indexName,
name,
odbcCompatible(type.sqlType.getVendorTypeNumber(), isOdbcClient),
type.esType.toUpperCase(Locale.ROOT),
type.toString(),
type.displaySize,
// TODO: is the buffer_length correct?
type.size,
@ -208,4 +207,4 @@ public class SysColumns extends Command {
&& Objects.equals(pattern, other.pattern)
&& Objects.equals(columnPattern, other.columnPattern);
}
}
}

View File

@ -11,15 +11,14 @@ import org.elasticsearch.xpack.sql.plan.logical.command.Command;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.tree.Source;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
import java.sql.DatabaseMetaData;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
@ -81,7 +80,7 @@ public class SysTypes extends Command {
List<List<?>> rows = values
// sort by SQL int type (that's what the JDBC/ODBC specs want) followed by name
.sorted(Comparator.comparing((DataType t) -> t.sqlType.getVendorTypeNumber()).thenComparing(DataType::sqlName))
.map(t -> asList(t.esType.toUpperCase(Locale.ROOT),
.map(t -> asList(t.toString(),
t.sqlType.getVendorTypeNumber(),
//https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size?view=sql-server-2017
t.defaultPrecision,
@ -132,4 +131,4 @@ public class SysTypes extends Command {
return type.equals(((SysTypes) obj).type);
}
}
}

View File

@ -87,9 +87,9 @@ public class TransportSqlQueryAction extends HandledTransportAction<SqlQueryRequ
List<ColumnInfo> columns = new ArrayList<>(rowSet.columnCount());
for (Schema.Entry entry : rowSet.schema()) {
if (Mode.isDriver(request.mode())) {
columns.add(new ColumnInfo("", entry.name(), entry.type().esType, entry.type().displaySize));
columns.add(new ColumnInfo("", entry.name(), entry.type().typeName, entry.type().displaySize));
} else {
columns.add(new ColumnInfo("", entry.name(), entry.type().esType));
columns.add(new ColumnInfo("", entry.name(), entry.type().typeName));
}
}
columns = unmodifiableList(columns);

View File

@ -33,6 +33,6 @@ public class TopHitsAggRef extends AggRef {
@Override
public String toString() {
return ">" + name + "[" + fieldDataType.esType + "]";
return ">" + name + "[" + fieldDataType.typeName + "]";
}
}

View File

@ -23,41 +23,41 @@ import java.util.Map;
public enum DataType {
// @formatter:off
// jdbc type, size, defPrecision,dispSize, int, rat, docvals
NULL( JDBCType.NULL, 0, 0, 0, false, false, false),
UNSUPPORTED( JDBCType.OTHER, 0, 0, 0, false, false, false),
BOOLEAN( JDBCType.BOOLEAN, 1, 1, 1, false, false, false),
BYTE( JDBCType.TINYINT, Byte.BYTES, 3, 5, true, false, true),
SHORT( JDBCType.SMALLINT, Short.BYTES, 5, 6, true, false, true),
INTEGER( JDBCType.INTEGER, Integer.BYTES, 10, 11, true, false, true),
LONG( JDBCType.BIGINT, Long.BYTES, 19, 20, true, false, true),
// esType jdbc type, size, defPrecision,dispSize, int, rat, docvals
NULL( "null", JDBCType.NULL, 0, 0, 0, false, false, false),
UNSUPPORTED( JDBCType.OTHER, 0, 0, 0, false, false, false),
BOOLEAN( "boolean", JDBCType.BOOLEAN, 1, 1, 1, false, false, false),
BYTE( "byte", JDBCType.TINYINT, Byte.BYTES, 3, 5, true, false, true),
SHORT( "short", JDBCType.SMALLINT, Short.BYTES, 5, 6, true, false, true),
INTEGER( "integer", JDBCType.INTEGER, Integer.BYTES, 10, 11, true, false, true),
LONG( "long", JDBCType.BIGINT, Long.BYTES, 19, 20, true, false, true),
// 53 bits defaultPrecision ~ 15(15.95) decimal digits (53log10(2)),
DOUBLE( JDBCType.DOUBLE, Double.BYTES, 15, 25, false, true, true),
DOUBLE( "double", JDBCType.DOUBLE, Double.BYTES, 15, 25, false, true, true),
// 24 bits defaultPrecision - 24*log10(2) =~ 7 (7.22)
FLOAT( JDBCType.REAL, Float.BYTES, 7, 15, false, true, true),
HALF_FLOAT( JDBCType.FLOAT, Double.BYTES, 16, 25, false, true, true),
FLOAT( "float", JDBCType.REAL, Float.BYTES, 7, 15, false, true, true),
HALF_FLOAT( "half_float", JDBCType.FLOAT, Double.BYTES, 16, 25, false, true, true),
// precision is based on long
SCALED_FLOAT( JDBCType.FLOAT, Double.BYTES, 19, 25, false, true, true),
KEYWORD( JDBCType.VARCHAR, Integer.MAX_VALUE, 256, 0, false, false, true),
TEXT( JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false),
OBJECT( JDBCType.STRUCT, -1, 0, 0, false, false, false),
NESTED( JDBCType.STRUCT, -1, 0, 0, false, false, false),
BINARY( JDBCType.VARBINARY, -1, Integer.MAX_VALUE, 0, false, false, false),
DATE( JDBCType.DATE, Long.BYTES, 10, 10, false, false, true),
SCALED_FLOAT( "scaled_float", JDBCType.FLOAT, Double.BYTES, 19, 25, false, true, true),
KEYWORD( "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE, 256, 0, false, false, true),
TEXT( "text", JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false),
OBJECT( "object", JDBCType.STRUCT, -1, 0, 0, false, false, false),
NESTED( "nested", JDBCType.STRUCT, -1, 0, 0, false, false, false),
BINARY( "binary", JDBCType.VARBINARY, -1, Integer.MAX_VALUE, 0, false, false, false),
DATE( JDBCType.DATE, Long.BYTES, 10, 10, false, false, true),
// since ODBC and JDBC interpret precision for Date as display size
// the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone)
// see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288
DATETIME( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
DATETIME( "date", JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
//
// specialized types
//
// IP can be v4 or v6. The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456
// aka 39 chars
IP( JDBCType.VARCHAR, 39, 39, 0, false, false, true),
IP( "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true),
//
// INTERVALS
// the list is long as there are a lot of variations and that's what clients (ODBC) expect
// jdbc type, size, prec,disp, int, rat, docvals
// esType:null jdbc type, size, prec,disp, int, rat, docvals
INTERVAL_YEAR( ExtTypes.INTERVAL_YEAR, Integer.BYTES, 7, 7, false, false, false),
INTERVAL_MONTH( ExtTypes.INTERVAL_MONTH, Integer.BYTES, 7, 7, false, false, false),
INTERVAL_DAY( ExtTypes.INTERVAL_DAY, Long.BYTES, 23, 23, false, false, false),
@ -126,7 +126,12 @@ public enum DataType {
/**
* Elasticsearch type name
* Type's name used for error messages and column info for the clients
*/
public final String typeName;
/**
* Elasticsearch data type that it maps to
*/
public final String esType;
@ -176,7 +181,13 @@ public enum DataType {
DataType(SQLType sqlType, int size, int defaultPrecision, int displaySize, boolean isInteger,
boolean isRational, boolean defaultDocValues) {
this.esType = name().toLowerCase(Locale.ROOT);
this(null, sqlType, size, defaultPrecision, displaySize, isInteger, isRational, defaultDocValues);
}
DataType(String esType, SQLType sqlType, int size, int defaultPrecision, int displaySize, boolean isInteger,
boolean isRational, boolean defaultDocValues) {
this.typeName = name().toLowerCase(Locale.ROOT);
this.esType = esType;
this.sqlType = sqlType;
this.size = size;
this.defaultPrecision = defaultPrecision;
@ -228,8 +239,6 @@ public enum DataType {
/**
* Creates returns DataType enum corresponding to the specified es type
* <p>
* For any dataType DataType.fromTypeName(dataType.esType) == dataType
*/
public static DataType fromTypeName(String esType) {
String uppercase = esType.toUpperCase(Locale.ROOT);
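
To make the new fields concrete, a small sketch of the values implied by the enum declarations above; the assertions are illustrative and not copied from the commit's tests.

import org.elasticsearch.xpack.sql.type.DataType;

// Illustrative expectations only, derived from the enum declarations above.
class EsTypeVsTypeNameSketch {
    static void demo() {
        assert "datetime".equals(DataType.DATETIME.typeName);  // name reported to clients
        assert "date".equals(DataType.DATETIME.esType);        // valid ES mapping type
        assert DataType.DATE.esType == null;                   // date-only type, no ES equivalent
        assert DataType.INTERVAL_YEAR.esType == null;          // intervals have no ES equivalent
        assert "keyword".equals(DataType.KEYWORD.esType);      // mapped types keep their ES name
        assert DataType.fromTypeName("keyword") == DataType.KEYWORD; // round-trip via typeName
    }
}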

View File

@ -120,9 +120,9 @@ public class Schema implements Iterable<Entry> {
}
sb.append(names.get(i));
sb.append(":");
sb.append(types.get(i).esType);
sb.append(types.get(i).typeName);
}
sb.append("]");
return sb.toString();
}
}
}

View File

@ -154,7 +154,7 @@ public class IndexResolverTests extends ESTestCase {
if (entry.getValue().size() > 1) {
for (EsIndex index : indices) {
EsField field = index.mapping().get(fieldName);
UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().esType);
UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().typeName);
fieldCaps.indices.add(index.name());
}
//TODO: what about nonAgg/SearchIndices?
@ -171,7 +171,7 @@ public class IndexResolverTests extends ESTestCase {
map = new HashMap<>();
merged.put(fieldName, map);
}
FieldCapabilities caps = map.computeIfAbsent(field.getDataType().esType,
FieldCapabilities caps = map.computeIfAbsent(field.getDataType().typeName,
esType -> new UpdateableFieldCapabilities(fieldName, esType,
isSearchable(field.getDataType()),
isAggregatable(field.getDataType())));

View File

@ -28,7 +28,7 @@ public class ParameterTests extends ESTestCase {
public void testSingleParameter() {
Expression expression = new SqlParser().createExpression("a = \n?",
Collections.singletonList(
new SqlTypedParamValue(DataType.KEYWORD.esType, "foo")
new SqlTypedParamValue(DataType.KEYWORD.typeName, "foo")
));
logger.info(expression);
assertThat(expression, instanceOf(Equals.class));
@ -42,10 +42,10 @@ public class ParameterTests extends ESTestCase {
public void testMultipleParameters() {
Expression expression = new SqlParser().createExpression("(? + ? * ?) - ?", Arrays.asList(
new SqlTypedParamValue(DataType.LONG.esType, 1L),
new SqlTypedParamValue(DataType.LONG.esType, 2L),
new SqlTypedParamValue(DataType.LONG.esType, 3L),
new SqlTypedParamValue(DataType.LONG.esType, 4L)
new SqlTypedParamValue(DataType.LONG.typeName, 1L),
new SqlTypedParamValue(DataType.LONG.typeName, 2L),
new SqlTypedParamValue(DataType.LONG.typeName, 3L),
new SqlTypedParamValue(DataType.LONG.typeName, 4L)
));
assertThat(expression, instanceOf(Sub.class));
Sub sub = (Sub) expression;
@ -62,9 +62,9 @@ public class ParameterTests extends ESTestCase {
public void testNotEnoughParameters() {
ParsingException ex = expectThrows(ParsingException.class,
() -> new SqlParser().createExpression("(? + ? * ?) - ?", Arrays.asList(
new SqlTypedParamValue(DataType.LONG.esType, 1L),
new SqlTypedParamValue(DataType.LONG.esType, 2L),
new SqlTypedParamValue(DataType.LONG.esType, 3L)
new SqlTypedParamValue(DataType.LONG.typeName, 1L),
new SqlTypedParamValue(DataType.LONG.typeName, 2L),
new SqlTypedParamValue(DataType.LONG.typeName, 3L)
)));
assertThat(ex.getMessage(), containsString("Not enough actual parameters"));
}

View File

@ -144,9 +144,9 @@ public class EscapedFunctionsTests extends ESTestCase {
public void testFunctionWithFunctionWithArgAndParams() {
String e = "POWER(?, {fn POWER({fn ABS(?)}, {fN ABS(?)})})";
Function f = (Function) parser.createExpression(e,
asList(new SqlTypedParamValue(DataType.LONG.esType, 1),
new SqlTypedParamValue(DataType.LONG.esType, 1),
new SqlTypedParamValue(DataType.LONG.esType, 1)));
asList(new SqlTypedParamValue(DataType.LONG.typeName, 1),
new SqlTypedParamValue(DataType.LONG.typeName, 1),
new SqlTypedParamValue(DataType.LONG.typeName, 1)));
assertEquals(e, f.sourceText());
assertEquals(2, f.arguments().size());

View File

@ -13,11 +13,10 @@ import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
import org.elasticsearch.xpack.sql.type.DataType;
import static java.util.Collections.singletonList;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
public class LikeEscapingParsingTests extends ESTestCase {
private final SqlParser parser = new SqlParser();
@ -33,7 +32,7 @@ public class LikeEscapingParsingTests extends ESTestCase {
Expression exp = null;
boolean parameterized = randomBoolean();
if (parameterized) {
exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD.esType, pattern)));
exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD.typeName, pattern)));
} else {
exp = parser.createExpression(format(null, "exp LIKE '{}'", pattern));
}

View File

@ -231,7 +231,7 @@ public class SysTablesTests extends ESTestCase {
}
private SqlTypedParamValue param(Object value) {
return new SqlTypedParamValue(DataTypes.fromJava(value).esType, value);
return new SqlTypedParamValue(DataTypes.fromJava(value).typeName, value);
}
private Tuple<Command, SqlSession> sql(String sql, List<SqlTypedParamValue> params) {

View File

@ -632,16 +632,16 @@ public class QueryTranslatorTests extends ESTestCase {
"\"sort\":[{\"keyword\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}"));
}
{
PhysicalPlan p = optimizeAndPlan("SELECT LAST(keyword) FROM test");
PhysicalPlan p = optimizeAndPlan("SELECT LAST(date) FROM test");
assertEquals(EsQueryExec.class, p.getClass());
EsQueryExec eqe = (EsQueryExec) p;
assertEquals(1, eqe.output().size());
assertEquals("LAST(keyword)", eqe.output().get(0).qualifiedName());
assertTrue(eqe.output().get(0).dataType() == DataType.KEYWORD);
assertEquals("LAST(date)", eqe.output().get(0).qualifiedName());
assertTrue(eqe.output().get(0).dataType() == DataType.DATETIME);
assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""),
endsWith("\"top_hits\":{\"from\":0,\"size\":1,\"version\":false,\"seq_no_primary_term\":false," +
"\"explain\":false,\"docvalue_fields\":[{\"field\":\"keyword\"}]," +
"\"sort\":[{\"keyword\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}"));
"\"explain\":false,\"docvalue_fields\":[{\"field\":\"date\",\"format\":\"epoch_millis\"}]," +
"\"sort\":[{\"date\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"date\"}}]}}}}}"));
}
}
@ -661,17 +661,17 @@ public class QueryTranslatorTests extends ESTestCase {
}
{
PhysicalPlan p = optimizeAndPlan("SELECT LAST(keyword, int) FROM test");
PhysicalPlan p = optimizeAndPlan("SELECT LAST(date, int) FROM test");
assertEquals(EsQueryExec.class, p.getClass());
EsQueryExec eqe = (EsQueryExec) p;
assertEquals(1, eqe.output().size());
assertEquals("LAST(keyword, int)", eqe.output().get(0).qualifiedName());
assertTrue(eqe.output().get(0).dataType() == DataType.KEYWORD);
assertEquals("LAST(date, int)", eqe.output().get(0).qualifiedName());
assertTrue(eqe.output().get(0).dataType() == DataType.DATETIME);
assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""),
endsWith("\"top_hits\":{\"from\":0,\"size\":1,\"version\":false,\"seq_no_primary_term\":false," +
"\"explain\":false,\"docvalue_fields\":[{\"field\":\"keyword\"}]," +
"\"explain\":false,\"docvalue_fields\":[{\"field\":\"date\",\"format\":\"epoch_millis\"}]," +
"\"sort\":[{\"int\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"integer\"}}," +
"{\"keyword\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}"));
"{\"date\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"date\"}}]}}}}}"));
}
}
}

View File

@ -463,7 +463,7 @@ public class DataTypeConversionTests extends ESTestCase {
public void testEsDataTypes() {
for (DataType type : values()) {
if (type != DATE) { // Doesn't have a corresponding type in ES
assertEquals(type, fromTypeName(type.esType));
assertEquals(type, fromTypeName(type.typeName));
}
}
}

View File

@ -171,20 +171,20 @@ public class TypesTests extends ESTestCase {
public void testGeoField() {
Map<String, EsField> mapping = loadMapping("mapping-geo.json");
EsField dt = mapping.get("location");
assertThat(dt.getDataType().esType, is("unsupported"));
assertThat(dt.getDataType().typeName, is("unsupported"));
}
public void testIpField() {
Map<String, EsField> mapping = loadMapping("mapping-ip.json");
assertThat(mapping.size(), is(1));
EsField dt = mapping.get("ip_addr");
assertThat(dt.getDataType().esType, is("ip"));
assertThat(dt.getDataType().typeName, is("ip"));
}
public void testUnsupportedTypes() {
Map<String, EsField> mapping = loadMapping("mapping-unsupported.json");
EsField dt = mapping.get("range");
assertThat(dt.getDataType().esType, is("unsupported"));
assertThat(dt.getDataType().typeName, is("unsupported"));
}
public static Map<String, EsField> loadMapping(String name) {