SQL: Refactor the type handling (elastic/x-pack-elasticsearch#3694)
Separates ES types and field metadata. Relates to elastic/x-pack-elasticsearch#3556.
Original commit: elastic/x-pack-elasticsearch@c9601f3390
This commit is contained in:
parent: 3f4c789993
commit: 36bd849cd3
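
For orientation before the diff: the refactoring keeps DataType as a plain enum of ES types and moves per-field metadata (sub-fields, exact/keyword variants, unsupported original types) into a separate EsField hierarchy (EsField, KeywordEsField, UnsupportedEsField). Below is a minimal illustrative sketch of that shape, simplified from what the hunks use and not the actual class added by this commit; the name of the boolean flag is an assumption.

    import java.util.Map;

    // Illustrative sketch only; simplified, not the class added by this commit.
    public class EsField {
        private final String name;
        private final DataType dataType;               // the ES type: KEYWORD, DATE, NESTED, ...
        private final Map<String, EsField> properties; // sub-fields of object/nested/text fields
        private final boolean aggregatable;            // assumption: the diff only shows an unnamed boolean flag

        public EsField(String name, DataType dataType, Map<String, EsField> properties, boolean aggregatable) {
            this.name = name;
            this.dataType = dataType;
            this.properties = properties;
            this.aggregatable = aggregatable;
        }

        public String getName() { return name; }
        public DataType getDataType() { return dataType; }
        public Map<String, EsField> getProperties() { return properties; }
        public boolean isAggregatable() { return aggregatable; }
    }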
@@ -42,38 +42,6 @@ import static java.sql.Types.VARCHAR;
 public abstract class JdbcUtils {
 
-    public static Class<?> asPrimitive(Class<?> wrapperClass) {
-        if (Boolean.class == wrapperClass) {
-            return boolean.class;
-        }
-        if (Byte.class == wrapperClass) {
-            return byte.class;
-        }
-        if (Short.class == wrapperClass) {
-            return short.class;
-        }
-        if (Character.class == wrapperClass) {
-            return char.class;
-        }
-        if (Integer.class == wrapperClass) {
-            return int.class;
-        }
-        if (Long.class == wrapperClass) {
-            return long.class;
-        }
-        if (Double.class == wrapperClass) {
-            return double.class;
-        }
-        if (Float.class == wrapperClass) {
-            return float.class;
-        }
-        if (Void.class == wrapperClass) {
-            return void.class;
-        }
-
-        return wrapperClass;
-    }
 
     public static int fromClass(Class<?> clazz) throws JdbcSQLException {
         if (clazz == null) {
             return NULL;
@@ -48,6 +48,7 @@ import org.elasticsearch.xpack.sql.tree.Node;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion;
 import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.UnsupportedEsField;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 import org.joda.time.DateTimeZone;
 
@@ -353,12 +354,14 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 if (named instanceof FieldAttribute) {
                     FieldAttribute fa = (FieldAttribute) named;
                     if (DataTypes.isUnsupported(fa.dataType())) {
+                        UnsupportedEsField unsupportedField = (UnsupportedEsField) fa.field();
                         named = u.withUnresolvedMessage(
-                                "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esName() + "] as is unsupported");
+                                "Cannot use field [" + fa.name() + "] type [" + unsupportedField.getOriginalType() +
+                                        "] as is unsupported");
                     }
                     else if (!fa.dataType().isPrimitive()) {
                         named = u.withUnresolvedMessage(
-                                "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esName() + "] only its subfields");
+                                "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esType + "] only its subfields");
                     }
                 }
 
@@ -543,7 +546,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
         private Integer findOrdinal(Expression expression) {
             if (expression instanceof Literal) {
                 Literal l = (Literal) expression;
-                if (l.dataType().isInteger()) {
+                if (l.dataType().isInteger) {
                     Object v = l.value();
                     if (v instanceof Number) {
                         return Integer.valueOf(((Number) v).intValue());
@@ -1030,13 +1033,13 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             if (left != null) {
                 DataType l = left.dataType();
                 DataType r = right.dataType();
-                if (!l.same(r)) {
+                if (l != r) {
                     DataType common = DataTypeConversion.commonType(l, r);
                     if (common == null) {
                         return e;
                     }
-                    left = l.same(common) ? left : new Cast(left.location(), left, common);
-                    right = r.same(common) ? right : new Cast(right.location(), right, common);
+                    left = l == common ? left : new Cast(left.location(), left, common);
+                    right = r == common ? right : new Cast(right.location(), right, common);
                     return e.replaceChildren(Arrays.asList(left, right));
                 }
             }
@@ -24,7 +24,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Filter;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
 import org.elasticsearch.xpack.sql.tree.Node;
-import org.elasticsearch.xpack.sql.type.UnsupportedDataType;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
 import java.util.ArrayList;
@@ -138,7 +138,7 @@ abstract class Verifier {
         for (Attribute a : p.intputSet()) {
             String nameCandidate = useQualifier ? a.qualifiedName() : a.name();
             // add only primitives (object types would only result in another error)
-            if (!(a.dataType() instanceof UnsupportedDataType) && a.dataType().isPrimitive()) {
+            if (!(a.dataType() == DataType.UNSUPPORTED) && a.dataType().isPrimitive()) {
                 potentialMatches.add(nameCandidate);
             }
         }
@@ -5,16 +5,16 @@
  */
 package org.elasticsearch.xpack.sql.analysis.index;
 
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 
 import java.util.Map;
 
 public class EsIndex {
 
     private final String name;
-    private final Map<String, DataType> mapping;
+    private final Map<String, EsField> mapping;
 
-    public EsIndex(String name, Map<String, DataType> mapping) {
+    public EsIndex(String name, Map<String, EsField> mapping) {
         assert name != null;
         assert mapping != null;
         this.name = name;
@@ -25,7 +25,7 @@ public class EsIndex {
         return name;
     }
 
-    public Map<String, DataType> mapping() {
+    public Map<String, EsField> mapping() {
         return mapping;
     }
 
@@ -20,7 +20,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.index.IndexNotFoundException;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.Types;
 
 import java.util.ArrayList;
@@ -279,7 +279,7 @@ public class IndexResolver {
                         "[" + indexOrAlias + "] contains more than one type " + typeNames + " so it is incompatible with sql");
             } else {
                 try {
-                    Map<String, DataType> mapping = Types.fromEs(singleType.sourceAsMap());
+                    Map<String, EsField> mapping = Types.fromEs(singleType.sourceAsMap());
                     return IndexResolution.valid(new EsIndex(indexOrAlias, mapping));
                 } catch (MappingException ex) {
                     return IndexResolution.invalid(ex.getMessage());
@@ -8,9 +8,11 @@ package org.elasticsearch.xpack.sql.expression;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 
 import static java.util.Collections.singletonList;
 
+import java.util.Collections;
 import java.util.List;
 
 public class Alias extends NamedExpression {
@@ -87,10 +89,13 @@ public class Alias extends NamedExpression {
 
             Attribute attr = Expressions.attribute(c);
             if (attr != null) {
-                return attr.clone(location(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic());
+                return attr.clone(location(), name(), qualifier, child.nullable(), id(), synthetic());
             }
             else {
-                return new FieldAttribute(location(), null, name(), child.dataType(), qualifier, child.nullable(), id(), synthetic());
+                // TODO: WE need to fix this fake Field
+                return new FieldAttribute(location(), null, name(),
+                        new EsField(name(), child.dataType(), Collections.emptyMap(), true),
+                        qualifier, child.nullable(), id(), synthetic());
             }
         }
 
@@ -66,32 +66,19 @@ public abstract class Attribute extends NamedExpression {
     }
 
     public Attribute withLocation(Location location) {
-        return Objects.equals(location(), location) ? this : clone(location, name(), dataType(), qualifier(),
-                nullable(), id(), synthetic());
+        return Objects.equals(location(), location) ? this : clone(location, name(), qualifier(), nullable(), id(), synthetic());
     }
 
     public Attribute withQualifier(String qualifier) {
-        return Objects.equals(qualifier(), qualifier) ? this : clone(location(), name(), dataType(), qualifier,
-                nullable(), id(), synthetic());
-    }
-
-    public Attribute withName(String name) {
-        return Objects.equals(name(), name) ? this : clone(location(), name, dataType(), qualifier(), nullable(),
-                id(), synthetic());
+        return Objects.equals(qualifier(), qualifier) ? this : clone(location(), name(), qualifier, nullable(), id(), synthetic());
     }
 
     public Attribute withNullability(boolean nullable) {
-        return Objects.equals(nullable(), nullable) ? this : clone(location(), name(), dataType(), qualifier(),
-                nullable, id(), synthetic());
+        return Objects.equals(nullable(), nullable) ? this : clone(location(), name(), qualifier(), nullable, id(), synthetic());
     }
 
-    public Attribute withId(ExpressionId id) {
-        return Objects.equals(id(), id) ? this : clone(location(), name(), dataType(), qualifier(), nullable(),
-                id, synthetic());
-    }
-
-    protected abstract Attribute clone(Location location, String name, DataType dataType, String qualifier,
-            boolean nullable, ExpressionId id, boolean synthetic);
+    protected abstract Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id,
+            boolean synthetic);
 
     @Override
     public Attribute toAttribute() {
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.expression;
 
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 public abstract class BinaryLogic extends BinaryOperator {
 
@@ -17,12 +16,12 @@ public abstract class BinaryLogic extends BinaryOperator {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
     protected TypeResolution resolveInputType(DataType inputType) {
-        return DataTypes.BOOLEAN.equals(inputType) ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
-                "'%s' requires type %s not %s", symbol(), DataTypes.BOOLEAN.sqlName(), inputType.sqlName());
+        return DataType.BOOLEAN == inputType ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
+                "'%s' requires type %s not %s", symbol(), DataType.BOOLEAN.sqlName(), inputType.sqlName());
     }
 }
@@ -33,7 +33,7 @@ public class Exists extends SubQueryExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -97,6 +97,6 @@ public abstract class Expressions {
 
     public static TypeResolution typeMustBeNumeric(Expression e) {
         return e.dataType().isNumeric()? TypeResolution.TYPE_RESOLVED : new TypeResolution(
-                "Argument required to be numeric ('%s' of type '%s')", Expressions.name(e), e.dataType().esName());
+                "Argument required to be numeric ('" + Expressions.name(e) + "' of type '" + e.dataType().esType + "')");
     }
 }
@@ -6,17 +6,12 @@
 package org.elasticsearch.xpack.sql.expression;
 
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.xpack.sql.analysis.index.MappingException;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.KeywordType;
-import org.elasticsearch.xpack.sql.type.NestedType;
-import org.elasticsearch.xpack.sql.type.StringType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
-import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Objects;
 
 /**
@@ -32,26 +27,28 @@ public class FieldAttribute extends TypedAttribute {
     private final FieldAttribute parent;
     private final FieldAttribute nestedParent;
     private final String path;
+    private final EsField field;
 
-    public FieldAttribute(Location location, String name, DataType dataType) {
-        this(location, null, name, dataType);
+    public FieldAttribute(Location location, String name, EsField field) {
+        this(location, null, name, field);
     }
 
-    public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType) {
-        this(location, parent, name, dataType, null, true, null, false);
+    public FieldAttribute(Location location, FieldAttribute parent, String name, EsField field) {
+        this(location, parent, name, field, null, true, null, false);
     }
 
-    public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType, String qualifier,
+    public FieldAttribute(Location location, FieldAttribute parent, String name, EsField field, String qualifier,
             boolean nullable, ExpressionId id, boolean synthetic) {
-        super(location, name, dataType, qualifier, nullable, id, synthetic);
+        super(location, name, field.getDataType(), qualifier, nullable, id, synthetic);
         this.path = parent != null ? parent.name() : StringUtils.EMPTY;
         this.parent = parent;
+        this.field = field;
 
         // figure out the last nested parent
         FieldAttribute nestedPar = null;
         if (parent != null) {
             nestedPar = parent.nestedParent;
-            if (parent.dataType() instanceof NestedType) {
+            if (parent.dataType() == DataType.NESTED) {
                 nestedPar = parent;
             }
         }
@@ -60,8 +57,7 @@ public class FieldAttribute extends TypedAttribute {
 
     @Override
     protected NodeInfo<FieldAttribute> info() {
-        return NodeInfo.create(this, FieldAttribute::new,
-                parent, name(), dataType(), qualifier(), nullable(), id(), synthetic());
+        return NodeInfo.create(this, FieldAttribute::new, parent, name(), field, qualifier(), nullable(), id(), synthetic());
     }
 
     public FieldAttribute parent() {
@@ -86,44 +82,29 @@ public class FieldAttribute extends TypedAttribute {
     }
 
     public boolean isInexact() {
-        return (dataType() instanceof StringType && ((StringType) dataType()).isInexact());
+        return field.isExact() == false;
     }
 
     public FieldAttribute exactAttribute() {
-        if (isInexact()) {
-            Map<String, KeywordType> exactFields = ((StringType) dataType()).exactKeywords();
-            if (exactFields.size() == 1) {
-                Entry<String, KeywordType> entry = exactFields.entrySet().iterator().next();
-                return innerField(entry.getKey(), entry.getValue());
-            }
-            if (exactFields.isEmpty()) {
-                throw new MappingException(
-                        "No keyword/multi-field defined exact matches for [%s]; define one or use MATCH/QUERY instead",
-                        name());
-            }
-            // pick the default - keyword
-            if (exactFields.size() > 1) {
-                throw new MappingException("Multiple exact keyword candidates %s available for [%s]; specify which one to use",
-                        exactFields.keySet(), name());
-            }
+        if (field.isExact() == false) {
+            return innerField(field.getExactField());
         }
         return this;
     }
 
-    private FieldAttribute innerField(String subFieldName, DataType type) {
-        return new FieldAttribute(location(), this, name() + "." + subFieldName, type, qualifier(), nullable(), id(), synthetic());
+    private FieldAttribute innerField(EsField type) {
+        return new FieldAttribute(location(), this, name() + "." + type.getName(), type, qualifier(), nullable(), id(), synthetic());
     }
 
     @Override
     protected Expression canonicalize() {
-        return new FieldAttribute(location(), null, "<none>", dataType(), null, true, id(), false);
+        return new FieldAttribute(location(), null, "<none>", field, null, true, id(), false);
     }
 
     @Override
-    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable,
-            ExpressionId id, boolean synthetic) {
+    protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
         FieldAttribute qualifiedParent = parent != null ? (FieldAttribute) parent.withQualifier(qualifier) : null;
         return new FieldAttribute(location, qualifiedParent, name, field, qualifier, nullable, id, synthetic);
     }
 
     @Override
@@ -140,4 +121,8 @@ public class FieldAttribute extends TypedAttribute {
     protected String label() {
         return "f";
     }
+
+    public EsField field() {
+        return field;
+    }
 }
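
The exact-field logic that used to live in the StringType/KeywordType hierarchy now goes through the field metadata. A hedged usage sketch follows, assuming only the FieldAttribute accessors visible in the hunks above (isInexact, exactAttribute, name); the helper name is illustrative, not from the commit.

    // Hedged sketch: resolve the exact (keyword) variant of a field for term-level operations.
    static String exactFieldName(FieldAttribute attribute) {
        // for inexact text fields, exactAttribute() walks to the keyword sub-field via EsField.getExactField()
        return attribute.isInexact() ? attribute.exactAttribute().name() : attribute.name();
    }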
@@ -31,19 +31,19 @@ public abstract class Foldables {
     }
 
     public static String stringValueOf(Expression e) {
-        return valueOf(e, DataTypes.KEYWORD);
+        return valueOf(e, DataType.KEYWORD);
     }
 
     public static Integer intValueOf(Expression e) {
-        return valueOf(e, DataTypes.INTEGER);
+        return valueOf(e, DataType.INTEGER);
     }
 
     public static Long longValueOf(Expression e) {
-        return valueOf(e, DataTypes.LONG);
+        return valueOf(e, DataType.LONG);
     }
 
     public static double doubleValueOf(Expression e) {
-        return valueOf(e, DataTypes.DOUBLE);
+        return valueOf(e, DataType.DOUBLE);
     }
 
     public static <T> List<T> valuesOf(List<Expression> list, DataType to) {
@@ -55,6 +55,6 @@ public abstract class Foldables {
     }
 
     public static List<Double> doubleValuesOf(List<Expression> list) {
-        return valuesOf(list, DataTypes.DOUBLE);
+        return valuesOf(list, DataType.DOUBLE);
     }
 }
@@ -36,9 +36,9 @@ public class LiteralAttribute extends TypedAttribute {
     }
 
     @Override
-    protected LiteralAttribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable,
+    protected LiteralAttribute clone(Location location, String name, String qualifier, boolean nullable,
             ExpressionId id, boolean synthetic) {
-        return new LiteralAttribute(location, name, qualifier, nullable, id, synthetic, dataType, literal);
+        return new LiteralAttribute(location, name, qualifier, nullable, id, synthetic, dataType(), literal);
     }
 
     public ProcessorDefinition asProcessorDefinition() {
@@ -65,8 +65,7 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {
     }
 
     @Override
-    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable,
-            ExpressionId id, boolean synthetic) {
+    protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
         return this;
     }
 
@@ -39,7 +39,7 @@ public class Score extends Function {
 
     @Override
     public DataType dataType() {
-        return DataTypes.FLOAT;
+        return DataType.FLOAT;
     }
 
     @Override
@@ -21,14 +21,14 @@ public class ScoreAttribute extends FunctionAttribute {
      * Constructor for normal use.
      */
     public ScoreAttribute(Location location) {
-        this(location, "SCORE()", DataTypes.FLOAT, null, false, null, false);
+        this(location, "SCORE()", DataType.FLOAT, null, false, null, false);
     }
 
     /**
      * Constructor for {@link #clone()}
      */
     private ScoreAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id,
             boolean synthetic) {
         super(location, name, dataType, qualifier, nullable, id, synthetic, "SCORE");
     }
 
@@ -38,9 +38,8 @@ public class ScoreAttribute extends FunctionAttribute {
     }
 
     @Override
-    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable,
-            ExpressionId id, boolean synthetic) {
-        return new ScoreAttribute(location, name, dataType, qualifier, nullable, id, synthetic);
+    protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
+        return new ScoreAttribute(location, name, dataType(), qualifier, nullable, id, synthetic);
     }
 
     @Override
@@ -46,11 +46,10 @@ public class AggregateFunctionAttribute extends FunctionAttribute {
     }
 
     @Override
-    protected Attribute clone(Location location, String name, DataType dataType, String qualifier,
-            boolean nullable, ExpressionId id, boolean synthetic) {
+    protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
         // this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl)
         // that is the functionId is actually derived from the expression id to easily track it across contexts
-        return new AggregateFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, functionId(), propertyPath);
+        return new AggregateFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic, functionId(), propertyPath);
     }
 
     public AggregateFunctionAttribute withFunctionId(String functionId, String propertyPath) {
@@ -47,7 +47,7 @@ public class Count extends AggregateFunction {
 
     @Override
     public DataType dataType() {
-        return DataTypes.LONG;
+        return DataType.LONG;
     }
 
     @Override
@@ -33,7 +33,7 @@ public class Mean extends NumericAggregate implements MatrixStatsEnclosed {
 
     @Override
     public DataType dataType() {
-        return DataTypes.DOUBLE;
+        return DataType.DOUBLE;
     }
 
     @Override
@@ -30,6 +30,6 @@ abstract class NumericAggregate extends AggregateFunction {
 
     @Override
     public DataType dataType() {
-        return DataTypes.DOUBLE;
+        return DataType.DOUBLE;
     }
 }
@@ -56,7 +56,7 @@ public class Percentile extends NumericAggregate implements EnclosedAgg {
 
     @Override
     public DataType dataType() {
-        return DataTypes.DOUBLE;
+        return DataType.DOUBLE;
    }
 
     @Override
@@ -56,7 +56,7 @@ public class PercentileRank extends AggregateFunction implements EnclosedAgg {
 
     @Override
     public DataType dataType() {
-        return DataTypes.DOUBLE;
+        return DataType.DOUBLE;
     }
 
     @Override
@@ -65,9 +65,8 @@ public class ScalarFunctionAttribute extends FunctionAttribute {
     }
 
     @Override
-    protected Attribute clone(Location location, String name, DataType dataType, String qualifier,
-            boolean nullable, ExpressionId id, boolean synthetic) {
-        return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic,
+    protected Attribute clone(Location location, String name, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
+        return new ScalarFunctionAttribute(location, name, dataType(), qualifier, nullable, id, synthetic,
                 functionId(), script, orderBy, processorDef);
     }
 
@@ -19,7 +19,6 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTempl
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.joda.time.DateTimeZone;
 
 import java.time.Instant;
@@ -75,11 +74,11 @@ public abstract class DateTimeFunction extends UnaryScalarFunction {
 
     @Override
     protected TypeResolution resolveType() {
-        if (field().dataType().same(DataTypes.DATE)) {
+        if (field().dataType() == DataType.DATE) {
             return TypeResolution.TYPE_RESOLVED;
         }
         return new TypeResolution("Function [" + functionName() + "] cannot be applied on a non-date expression (["
-                + Expressions.name(field()) + "] of type [" + field().dataType().esName() + "])");
+                + Expressions.name(field()) + "] of type [" + field().dataType().esType + "])");
     }
 
     @Override
@@ -137,7 +136,7 @@ public abstract class DateTimeFunction extends UnaryScalarFunction {
 
     @Override
     public DataType dataType() {
-        return DataTypes.INTEGER;
+        return DataType.INTEGER;
     }
 
     // used for applying ranges
@@ -13,15 +13,16 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
 public class E extends MathFunction {
 
-    private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.E", Params.EMPTY, DataTypes.DOUBLE);
+    private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.E", Params.EMPTY, DataType.DOUBLE);
 
     public E(Location location) {
-        super(location, new Literal(location, Math.E, DataTypes.DOUBLE));
+        super(location, new Literal(location, Math.E, DataType.DOUBLE));
     }
 
     @Override
@@ -46,7 +46,7 @@ public abstract class MathFunction extends UnaryScalarFunction {
 
     @Override
     public DataType dataType() {
-        return DataTypes.DOUBLE;
+        return DataType.DOUBLE;
     }
 
     @Override
@@ -13,15 +13,16 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
 public class Pi extends MathFunction {
 
-    private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.PI", Params.EMPTY, DataTypes.DOUBLE);
+    private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.PI", Params.EMPTY, DataType.DOUBLE);
 
     public Pi(Location location) {
-        super(location, new Literal(location, Math.PI, DataTypes.DOUBLE));
+        super(location, new Literal(location, Math.PI, DataType.DOUBLE));
     }
 
     @Override
@@ -28,7 +28,7 @@ public class ScriptTemplate {
     private final DataType outputType;
 
     public ScriptTemplate(String template) {
-        this(template, Params.EMPTY, DataTypes.KEYWORD);
+        this(template, Params.EMPTY, DataType.KEYWORD);
     }
 
     public ScriptTemplate(String template, Params params, DataType outputType) {
@@ -30,7 +30,7 @@ public abstract class BinaryComparison extends BinaryOperator {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @SuppressWarnings({ "rawtypes", "unchecked" })
@@ -53,7 +53,7 @@ public class In extends Expression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -29,7 +29,7 @@ public class IsNotNull extends UnaryExpression {
     }
 
     public Object fold() {
-        return child().fold() != null && !DataTypes.NULL.same(child().dataType());
+        return child().fold() != null && !DataTypes.isNull(child().dataType());
     }
 
     @Override
@@ -39,7 +39,7 @@ public class IsNotNull extends UnaryExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -12,7 +12,6 @@ import org.elasticsearch.xpack.sql.expression.UnaryExpression;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import java.util.Objects;
 
@@ -33,11 +32,11 @@ public class Not extends UnaryExpression {
     }
 
     protected TypeResolution resolveType() {
-        if (DataTypes.BOOLEAN.same(child().dataType())) {
+        if (DataType.BOOLEAN == child().dataType()) {
             return TypeResolution.TYPE_RESOLVED;
         }
         return new TypeResolution("Cannot negate expression ([" + Expressions.name(child()) + "] of type ["
-                + child().dataType().esName() + "])");
+                + child().dataType().esType + "])");
     }
 
     @Override
@@ -56,6 +55,6 @@ public class Not extends UnaryExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 }
@@ -86,7 +86,7 @@ public class Range extends Expression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -63,7 +63,7 @@ public abstract class FullTextPredicate extends Expression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -53,7 +53,7 @@ public class Like extends BinaryExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -82,7 +82,7 @@ public class LikePattern extends LeafExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.KEYWORD;
+        return DataType.KEYWORD;
     }
 
     @Override
@@ -48,7 +48,7 @@ public class RLike extends BinaryExpression {
 
     @Override
     public DataType dataType() {
-        return DataTypes.BOOLEAN;
+        return DataType.BOOLEAN;
     }
 
     @Override
@@ -855,7 +855,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             if (as.child() instanceof Cast) {
                 Cast c = (Cast) as.child();
 
-                if (c.from().same(c.to())) {
+                if (c.from() == c.to()) {
                     Alias newAs = new Alias(as.location(), as.name(), as.qualifier(), c.field(), as.id(), as.synthetic());
                     replacedCast.put(as.toAttribute(), newAs.toAttribute());
                     return newAs;
@@ -871,7 +871,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
             if (e instanceof Cast) {
                 Cast c = (Cast) e;
 
-                if (c.from().same(c.to())) {
+                if (c.from() == c.to()) {
                     Expression argument = c.field();
                     if (argument instanceof NamedExpression) {
                         replacedCast.put(c.toAttribute(), ((NamedExpression) argument).toAttribute());
@@ -192,7 +192,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
                 e = new Like(loc, exp, visitPattern(pCtx.pattern()));
                 break;
             case SqlBaseParser.RLIKE:
-                e = new RLike(loc, exp, new Literal(source(pCtx.regex), string(pCtx.regex), DataTypes.KEYWORD));
+                e = new RLike(loc, exp, new Literal(source(pCtx.regex), string(pCtx.regex), DataType.KEYWORD));
                 break;
             case SqlBaseParser.NULL:
                 // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists))
@@ -335,31 +335,31 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
             case "bit":
             case "bool":
             case "boolean":
-                return DataTypes.BOOLEAN;
+                return DataType.BOOLEAN;
             case "tinyint":
             case "byte":
-                return DataTypes.BYTE;
+                return DataType.BYTE;
             case "smallint":
             case "short":
-                return DataTypes.SHORT;
+                return DataType.SHORT;
             case "int":
             case "integer":
-                return DataTypes.INTEGER;
+                return DataType.INTEGER;
             case "long":
             case "bigint":
-                return DataTypes.LONG;
+                return DataType.LONG;
             case "real":
-                return DataTypes.FLOAT;
+                return DataType.FLOAT;
             case "float":
             case "double":
-                return DataTypes.DOUBLE;
+                return DataType.DOUBLE;
             case "date":
             case "timestamp":
-                return DataTypes.DATE;
+                return DataType.DATE;
             case "char":
             case "varchar":
             case "string":
-                return DataTypes.KEYWORD;
+                return DataType.KEYWORD;
             default:
                 throw new ParsingException(source(ctx), "Does not recognize type %s", type);
         }
@@ -433,12 +433,12 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
 
     @Override
     public Expression visitNullLiteral(NullLiteralContext ctx) {
-        return new Literal(source(ctx), null, DataTypes.NULL);
+        return new Literal(source(ctx), null, DataType.NULL);
     }
 
     @Override
     public Expression visitBooleanLiteral(BooleanLiteralContext ctx) {
-        return new Literal(source(ctx), Booleans.parseBoolean(ctx.getText().toLowerCase(Locale.ROOT), false), DataTypes.BOOLEAN);
+        return new Literal(source(ctx), Booleans.parseBoolean(ctx.getText().toLowerCase(Locale.ROOT), false), DataType.BOOLEAN);
     }
 
     @Override
@@ -447,18 +447,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
         for (TerminalNode node : ctx.STRING()) {
             sb.append(unquoteString(text(node)));
         }
-        return new Literal(source(ctx), sb.toString(), DataTypes.KEYWORD);
+        return new Literal(source(ctx), sb.toString(), DataType.KEYWORD);
     }
 
     @Override
     public Object visitDecimalLiteral(DecimalLiteralContext ctx) {
-        return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataTypes.DOUBLE);
+        return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE);
     }
 
     @Override
     public Object visitIntegerLiteral(IntegerLiteralContext ctx) {
         BigDecimal bigD = new BigDecimal(ctx.getText());
         // TODO: this can be improved to use the smallest type available
-        return new Literal(source(ctx), bigD.longValueExact(), DataTypes.INTEGER);
+        return new Literal(source(ctx), bigD.longValueExact(), DataType.INTEGER);
     }
 }
@@ -41,6 +41,7 @@ import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias;
 import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation;
 import org.elasticsearch.xpack.sql.plan.logical.With;
 import org.elasticsearch.xpack.sql.session.EmptyExecutable;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.joda.time.DateTimeZone;
 
@@ -88,7 +89,7 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder {
 
         if (ctx.limit != null && ctx.INTEGER_VALUE() != null) {
             plan = new Limit(source(ctx.limit), new Literal(source(ctx),
-                    Integer.parseInt(ctx.limit.getText()), DataTypes.INTEGER), plan);
+                    Integer.parseInt(ctx.limit.getText()), DataType.INTEGER), plan);
         }
 
         return plan;
@@ -10,8 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.CompoundDataType;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
 import java.util.ArrayList;
@@ -36,23 +35,23 @@ public class EsRelation extends LeafPlan {
         return NodeInfo.create(this, EsRelation::new, index);
     }
 
-    private static List<Attribute> flatten(Location location, Map<String, DataType> mapping) {
+    private static List<Attribute> flatten(Location location, Map<String, EsField> mapping) {
         return flatten(location, mapping, null);
     }
 
-    private static List<Attribute> flatten(Location location, Map<String, DataType> mapping, FieldAttribute parent) {
+    private static List<Attribute> flatten(Location location, Map<String, EsField> mapping, FieldAttribute parent) {
         List<Attribute> list = new ArrayList<>();
 
-        for (Entry<String, DataType> entry : mapping.entrySet()) {
+        for (Entry<String, EsField> entry : mapping.entrySet()) {
             String name = entry.getKey();
-            DataType t = entry.getValue();
+            EsField t = entry.getValue();
 
             if (t != null) {
                 FieldAttribute f = new FieldAttribute(location, parent, parent != null ? parent.name() + "." + name : name, t);
                 list.add(f);
                 // object or nested
-                if (t instanceof CompoundDataType) {
-                    list.addAll(flatten(location, ((CompoundDataType) t).properties(), f));
+                if (t.getProperties().isEmpty() == false) {
+                    list.addAll(flatten(location, t.getProperties(), f));
                 }
             }
         }
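
The flatten change above now recurses over EsField.getProperties() instead of checking for CompoundDataType. A hedged sketch of the same traversal in isolation follows; the helper and variable names are illustrative, not from the commit.

    import java.util.List;
    import java.util.Map;

    // Illustrative recursion over sub-fields; mirrors the getProperties() walk shown above.
    class MappingWalker {
        static void collectFieldNames(String prefix, Map<String, EsField> mapping, List<String> out) {
            for (Map.Entry<String, EsField> entry : mapping.entrySet()) {
                String fullName = prefix == null ? entry.getKey() : prefix + "." + entry.getKey();
                out.add(fullName);
                // object and nested fields expose their children through getProperties()
                if (entry.getValue().getProperties().isEmpty() == false) {
                    collectFieldNames(fullName, entry.getValue().getProperties(), out);
                }
            }
        }
    }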
@@ -12,6 +12,7 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import static java.util.stream.Collectors.toList;
@@ -100,7 +101,7 @@ public class Join extends BinaryPlan {
         return childrenResolved() &&
                 duplicatesResolved() &&
                 expressionsResolved() &&
-                (condition == null || DataTypes.BOOLEAN.equals(condition.dataType()));
+                (condition == null || DataType.BOOLEAN == condition.dataType());
     }
 
     @Override
@@ -19,7 +19,7 @@ import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.Node;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.tree.NodeUtils;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
 import org.elasticsearch.xpack.sql.util.Graphviz;
 
 import java.util.LinkedHashMap;
@@ -71,7 +71,7 @@ public class Debug extends Command {
 
     @Override
     public List<Attribute> output() {
-        return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD));
+        return singletonList(new FieldAttribute(location(), "plan", new KeywordEsField("plan")));
     }
 
     @Override
@@ -19,7 +19,7 @@ import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
 import org.elasticsearch.xpack.sql.util.Graphviz;
 
 import java.util.HashMap;
@@ -82,7 +82,7 @@ public class Explain extends Command {
 
     @Override
     public List<Attribute> output() {
-        return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD));
+        return singletonList(new FieldAttribute(location(), "plan", new KeywordEsField("plan")));
     }
 
     @Override
@@ -8,15 +8,14 @@ package org.elasticsearch.xpack.sql.plan.logical.command;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
-import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.session.Rows;
 import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.CompoundDataType;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
+import org.elasticsearch.xpack.sql.type.EsField;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -47,8 +46,8 @@ public class ShowColumns extends Command {
 
     @Override
     public List<Attribute> output() {
-        return asList(new FieldAttribute(location(), "column", DataTypes.KEYWORD),
-                      new FieldAttribute(location(), "type", DataTypes.KEYWORD));
+        return asList(new FieldAttribute(location(), "column", new KeywordEsField("column")),
+                      new FieldAttribute(location(), "type", new KeywordEsField("type")));
     }
 
     @Override
@@ -66,15 +65,16 @@ public class ShowColumns extends Command {
         ));
     }
 
-    private void fillInRows(Map<String, DataType> mapping, String prefix, List<List<?>> rows) {
-        for (Entry<String, DataType> e : mapping.entrySet()) {
-            DataType dt = e.getValue();
+    private void fillInRows(Map<String, EsField> mapping, String prefix, List<List<?>> rows) {
+        for (Entry<String, EsField> e : mapping.entrySet()) {
+            EsField field = e.getValue();
+            DataType dt = field.getDataType();
             String name = e.getKey();
             if (dt != null) {
                 rows.add(asList(prefix != null ? prefix + "." + name : name, dt.sqlName()));
-                if (dt instanceof CompoundDataType) {
+                if (field.getProperties().isEmpty() == false) {
                     String newPrefix = prefix != null ? prefix + "." + name : name;
-                    fillInRows(((CompoundDataType) dt).properties(), newPrefix, rows);
+                    fillInRows(field.getProperties(), newPrefix, rows);
                 }
             }
         }
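
Side note (not part of the patch): the new fillInRows drives its recursion off EsField.getProperties() rather than a CompoundDataType instanceof check. A minimal standalone sketch of that traversal, using a hypothetical stand-in type and illustrative field names:

    import java.util.LinkedHashMap;
    import java.util.Map;

    class MappingWalkSketch {
        // Stand-in for EsField, just enough to show the recursion.
        static class Field {
            final String sqlName;
            final Map<String, Field> properties = new LinkedHashMap<>();
            Field(String sqlName) { this.sqlName = sqlName; }
        }

        // Mirrors the patched fillInRows: emit "path -> type" and recurse into sub-properties.
        static void walk(Map<String, Field> mapping, String prefix) {
            for (Map.Entry<String, Field> e : mapping.entrySet()) {
                String path = prefix != null ? prefix + "." + e.getKey() : e.getKey();
                System.out.println(path + " -> " + e.getValue().sqlName);
                if (!e.getValue().properties.isEmpty()) {
                    walk(e.getValue().properties, path);
                }
            }
        }

        public static void main(String[] args) {
            Field address = new Field("STRUCT");
            address.properties.put("city", new Field("VARCHAR"));
            Map<String, Field> mapping = new LinkedHashMap<>();
            mapping.put("address", address);
            walk(mapping, null); // prints address, then address.city
        }
    }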
@@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
 
 import java.util.Collection;
 import java.util.List;
@@ -45,8 +45,8 @@ public class ShowFunctions extends Command {
 
     @Override
     public List<Attribute> output() {
-        return asList(new FieldAttribute(location(), "name", DataTypes.KEYWORD),
-                      new FieldAttribute(location(), "type", DataTypes.KEYWORD));
+        return asList(new FieldAttribute(location(), "name", new KeywordEsField("name")),
+                      new FieldAttribute(location(), "type", new KeywordEsField("type")));
     }
 
     @Override
@@ -13,7 +13,7 @@ import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
 
 import java.util.List;
 
@@ -32,7 +32,7 @@ public class ShowSchemas extends Command {
 
     @Override
     public List<Attribute> output() {
-        return singletonList(new FieldAttribute(location(), "schema", DataTypes.KEYWORD));
+        return singletonList(new FieldAttribute(location(), "schema", new KeywordEsField("schema")));
    }
 
     @Override
@@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
 
 import java.util.List;
 import java.util.Objects;
@@ -42,8 +42,8 @@ public class ShowTables extends Command {
 
     @Override
     public List<Attribute> output() {
-        return asList(new FieldAttribute(location(), "name", DataTypes.KEYWORD),
-                      new FieldAttribute(location(), "type", DataTypes.KEYWORD));
+        return asList(new FieldAttribute(location(), "name", new KeywordEsField("name")),
+                      new FieldAttribute(location(), "type", new KeywordEsField("type")));
     }
 
     @Override
@@ -85,6 +85,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.ScriptQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.TermQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.WildcardQuery;
 import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.elasticsearch.xpack.sql.util.Check;
 import org.elasticsearch.xpack.sql.util.ReflectionUtils;
@@ -553,7 +554,7 @@ abstract class QueryTranslator {
             // no need to bind the wrapped/target agg - it is already available through the nested script
             // (needed to create the script itself)
             Params params = paramsBuilder().script(scriptTemplate.params()).variable(valueOf(bc.right())).build();
-            ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
+            ScriptTemplate script = new ScriptTemplate(template, params, DataType.BOOLEAN);
             if (onAggs) {
                 aggFilter = new AggFilter(at.id().toString(), script);
             }
@@ -580,7 +581,7 @@ abstract class QueryTranslator {
                 params = paramsBuilder().agg(fa).variable(valueOf(bc.right())).build();
             }
 
-            aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
+            aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataType.BOOLEAN));
         }
 
         //
@@ -675,7 +676,7 @@ abstract class QueryTranslator {
                     .variable(upper)
                     .build();
 
-            ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
+            ScriptTemplate script = new ScriptTemplate(template, params, DataType.BOOLEAN);
 
             if (onAggs) {
                 aggFilter = new AggFilter(at.id().toString(), script);
@@ -708,7 +709,7 @@ abstract class QueryTranslator {
 
             }
             aggFilter = new AggFilter(((NamedExpression) r.value()).id().toString(),
-                    new ScriptTemplate(template, params, DataTypes.BOOLEAN));
+                    new ScriptTemplate(template, params, DataType.BOOLEAN));
         }
         //
         // WHERE
@@ -15,7 +15,7 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 
 import java.util.ArrayList;
@@ -57,16 +57,17 @@ public class TransportSqlListColumnsAction extends HandledTransportAction<SqlLis
         List<MetaColumnInfo> columns = new ArrayList<>();
         for (EsIndex esIndex : esIndices) {
             int pos = 0;
-            for (Map.Entry<String, DataType> entry : esIndex.mapping().entrySet()) {
+            for (Map.Entry<String, EsField> entry : esIndex.mapping().entrySet()) {
                 String name = entry.getKey();
                 pos++; // JDBC is 1-based so we start with 1 here
                 if (columnMatcher == null || columnMatcher.matcher(name).matches()) {
-                    DataType type = entry.getValue();
+                    EsField field = entry.getValue();
                     if (request.mode() == JDBC) {
                         // the column size it's actually its precision (based on the Javadocs)
-                        columns.add(new MetaColumnInfo(esIndex.name(), name, type.esName(), type.sqlType(), type.precision(), pos));
+                        columns.add(new MetaColumnInfo(esIndex.name(), name, field.getDataType().esType,
+                                field.getDataType().jdbcType, field.getPrecision(), pos));
                     } else {
-                        columns.add(new MetaColumnInfo(esIndex.name(), name, type.esName(), pos));
+                        columns.add(new MetaColumnInfo(esIndex.name(), name, field.getDataType().esType, pos));
                     }
                 }
             }
         }
@@ -73,9 +73,10 @@ public class TransportSqlQueryAction extends HandledTransportAction<SqlQueryRequ
         List<ColumnInfo> columns = new ArrayList<>(rowSet.columnCount());
         for (Schema.Entry entry : rowSet.schema()) {
             if (request.mode() == JDBC) {
-                columns.add(new ColumnInfo("", entry.name(), entry.type().esName(), entry.type().sqlType(), entry.type().displaySize()));
+                columns.add(new ColumnInfo("", entry.name(), entry.type().esType, entry.type().jdbcType,
+                        entry.type().displaySize));
             } else {
-                columns.add(new ColumnInfo("", entry.name(), entry.type().esName()));
+                columns.add(new ColumnInfo("", entry.name(), entry.type().esType));
             }
         }
         columns = unmodifiableList(columns);
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.querydsl.agg;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import java.util.Locale;
@@ -27,6 +28,6 @@ public class AndAggFilter extends AggFilter {
     private static ScriptTemplate and(ScriptTemplate left, ScriptTemplate right) {
         String template = format(Locale.ROOT, "( %s ) && ( %s )", left.template(), right.template());
         Params params = new ParamsBuilder().script(left.params()).script(right.params()).build();
-        return new ScriptTemplate(template, params, DataTypes.BOOLEAN);
+        return new ScriptTemplate(template, params, DataType.BOOLEAN);
     }
 }
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.querydsl.agg;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypes;
 
 import java.util.Locale;
@@ -27,6 +28,6 @@ public class OrAggFilter extends AggFilter {
     private static ScriptTemplate and(ScriptTemplate left, ScriptTemplate right) {
         String template = format(Locale.ROOT, "( %s ) || ( %s )", left.template(), right.template());
         Params params = new ParamsBuilder().script(left.params()).script(right.params()).build();
-        return new ScriptTemplate(template, params, DataTypes.BOOLEAN);
+        return new ScriptTemplate(template, params, DataType.BOOLEAN);
     }
 }
@@ -178,7 +178,7 @@ public class QueryContainer {
     // reference methods
     //
     private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
-        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.dataType().hasDocValues());
+        return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.field().hasDocValues());
     }
 
     private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {
@@ -187,9 +187,9 @@
 
         String name = aliasName(attr);
         Query q = rewriteToContainNestedField(query, attr.location(),
-                attr.nestedParent().name(), name, attr.dataType().hasDocValues());
+                attr.nestedParent().name(), name, attr.field().hasDocValues());
 
-        SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.dataType().hasDocValues(), attr.parent().name());
+        SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.field().hasDocValues(), attr.parent().name());
         nestedRefs.add(nestedFieldRef);
 
         return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);
@@ -1,59 +0,0 @@ AbstractDataType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-import java.util.Objects;
-
-abstract class AbstractDataType implements DataType {
-
-    private final JDBCType sqlType;
-    private final boolean hasDocValues;
-
-    AbstractDataType(JDBCType sqlType, boolean hasDocValues) {
-        this.sqlType = sqlType;
-        this.hasDocValues = hasDocValues;
-    }
-
-    @Override
-    public boolean hasDocValues() {
-        return hasDocValues;
-    }
-
-    @Override
-    public boolean isPrimitive() {
-        return true;
-    }
-
-    @Override
-    public JDBCType sqlType() {
-        return sqlType;
-    }
-
-    @Override
-    public String toString() {
-        return esName();
-    }
-
-    @Override
-    public int hashCode() {
-        return esName().hashCode();
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        AbstractDataType other = (AbstractDataType) obj;
-        return Objects.equals(esName(), other.esName());
-    }
-}
@@ -1,82 +0,0 @@ ArrayType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-import java.util.Objects;
-
-public class ArrayType implements DataType {
-
-    private final DateType type;
-    private final int dimension;
-
-    public ArrayType(DateType type, int dimension) {
-        this.type = type;
-        this.dimension = dimension;
-    }
-
-    public DateType type() {
-        return type;
-    }
-
-    public int dimension() {
-        return dimension;
-    }
-
-    @Override
-    public String esName() {
-        return "array";
-    }
-
-    @Override
-    public JDBCType sqlType() {
-        return JDBCType.ARRAY;
-    }
-
-    @Override
-    public int precision() {
-        return type.precision();
-    }
-
-    @Override
-    public boolean isInteger() {
-        return false;
-    }
-
-    @Override
-    public boolean isRational() {
-        return false;
-    }
-
-    @Override
-    public boolean isPrimitive() {
-        return false;
-    }
-
-    @Override
-    public boolean hasDocValues() {
-        return type.hasDocValues();
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(type, dimension);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        ArrayType other = (ArrayType) obj;
-        return Objects.equals(dimension, other.dimension) && Objects.equals(type, other.type);
-    }
-}
@@ -1,20 +0,0 @@ BinaryType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-
-public class BinaryType extends AbstractDataType {
-
-    BinaryType(boolean docValues) {
-        super(JDBCType.VARBINARY, docValues);
-    }
-
-    @Override
-    public String esName() {
-        return "binary";
-    }
-}
@@ -1,20 +0,0 @@ BooleanType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-
-public class BooleanType extends AbstractDataType {
-
-    BooleanType(boolean docValues) {
-        super(JDBCType.BOOLEAN, docValues);
-    }
-
-    @Override
-    public String esName() {
-        return "boolean";
-    }
-}
@@ -1,20 +0,0 @@ ByteType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-
-public class ByteType extends AbstractDataType {
-
-    ByteType(boolean docValues) {
-        super(JDBCType.TINYINT, docValues);
-    }
-
-    @Override
-    public String esName() {
-        return "byte";
-    }
-}
@@ -1,54 +0,0 @@ CompoundDataType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import java.sql.JDBCType;
-import java.util.Map;
-import java.util.Objects;
-
-public abstract class CompoundDataType extends AbstractDataType {
-
-    private final Map<String, DataType> properties;
-
-    CompoundDataType(JDBCType sqlType, boolean hasDocValues, Map<String, DataType> properties) {
-        super(sqlType, hasDocValues);
-        this.properties = properties;
-    }
-
-    public Map<String, DataType> properties() {
-        return properties;
-    }
-
-    @Override
-    public int precision() {
-        return 0;
-    }
-
-    @Override
-    public boolean isInteger() {
-        return false;
-    }
-
-    @Override
-    public boolean isRational() {
-        return false;
-    }
-
-    @Override
-    public boolean isPrimitive() {
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(super.hashCode(), Objects.hash(properties));
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        return super.equals(obj) && Objects.equals(properties, ((CompoundDataType) obj).properties);
-    }
-}
@@ -6,58 +6,126 @@ DataType: interface replaced by enum
 package org.elasticsearch.xpack.sql.type;
 
 import java.sql.JDBCType;
+import java.util.Locale;
 
-public interface DataType {
+/**
+ * Elasticsearch data types that supported by SQL interface
+ */
+public enum DataType {
+    // @formatter:off
+    //           jdbc type,          size,              defPrecision,      dispSize, sig,  int,   rat,   docvals
+    NULL(        JDBCType.NULL,      0,                 0,                 0),
+    UNSUPPORTED( JDBCType.OTHER,     0,                 0,                 0),
+    BOOLEAN(     JDBCType.BOOLEAN,   1,                 1,                 1),
+    BYTE(        JDBCType.TINYINT,   Byte.BYTES,        3,                 5,        true, true,  false, true),
+    SHORT(       JDBCType.SMALLINT,  Short.BYTES,       5,                 6,        true, true,  false, true),
+    INTEGER(     JDBCType.INTEGER,   Integer.BYTES,     10,                11,       true, true,  false, true),
+    LONG(        JDBCType.BIGINT,    Long.BYTES,        19,                20,       true, true,  false, true),
+    // 53 bits defaultPrecision ~ 16(15.95) decimal digits (53log10(2)),
+    DOUBLE(      JDBCType.DOUBLE,    Double.BYTES,      16,                25,       true, false, true,  true),
+    // 24 bits defaultPrecision - 24*log10(2) =~ 7 (7.22)
+    FLOAT(       JDBCType.REAL,      Float.BYTES,       7,                 15,       true, false, true,  true),
+    HALF_FLOAT(  JDBCType.FLOAT,     Double.BYTES,      16,                25,       true, false, true,  true),
+    // precision is based on long
+    SCALED_FLOAT(JDBCType.FLOAT,     Double.BYTES,      19,                25,       true, false, true,  true),
+    // 39 is maximum address in IPv6
+    IP(          JDBCType.VARCHAR,   -1,                39,                0),
+    KEYWORD(     JDBCType.VARCHAR,   Integer.MAX_VALUE, 256,               0),
+    TEXT(        JDBCType.VARCHAR,   Integer.MAX_VALUE, Integer.MAX_VALUE, 0,        false, false, false, false),
+    OBJECT(      JDBCType.STRUCT,    -1,                0,                 0),
+    NESTED(      JDBCType.STRUCT,    -1,                0,                 0),
+    TOKEN_COUNT( JDBCType.INTEGER,   Integer.BYTES,     10,                11),
+    BINARY(      JDBCType.VARBINARY, -1,                Integer.MAX_VALUE, 0),
+    GEO_POINT(   null,               -1,                Integer.MAX_VALUE, 0),
+    DATE(        JDBCType.TIMESTAMP, Long.BYTES,        19,                20);
+    // @formatter:on
 
-    String esName();
+    /**
+     * Elasticsearch type name
+     */
+    public final String esType;
 
-    default String sqlName() {
-        return sqlType().name();
+    /**
+     * Compatible JDBC type
+     */
+    public final JDBCType jdbcType;
+
+    /**
+     * Size of the type in bytes
+     * <p>
+     * -1 if the size can vary
+     */
+    public final int size;
+
+    /**
+     * Precision
+     * <p>
+     * Specified column size. For numeric data, this is the maximum precision. For character
+     * data, this is the length in characters. For datetime datatypes, this is the length in characters of the
+     * String representation (assuming the maximum allowed defaultPrecision of the fractional seconds component).
+     */
+    public final int defaultPrecision;
+
+    /**
+     * Display Size
+     * <p>
+     * Normal maximum width in characters.
+     */
+    public final int displaySize;
+
+    /**
+     * True if the type represents a signed number
+     */
+    public final boolean isSigned;
+
+    /**
+     * True if the type represents an integer number
+     */
+    public final boolean isInteger;
+
+    /**
+     * True if the type represents a rational number
+     */
+    public final boolean isRational;
+
+    /**
+     * True if the type supports doc values by default
+     */
+    public final boolean defaultDocValues;
+
+    DataType(JDBCType jdbcType, int size, int defaultPrecision, int displaySize, boolean isSigned, boolean isInteger, boolean isRational,
+             boolean defaultDocValues) {
+        this.esType = name().toLowerCase(Locale.ROOT);
+        this.jdbcType = jdbcType;
+        this.size = size;
+        this.defaultPrecision = defaultPrecision;
+        this.displaySize = displaySize;
+        this.isSigned = isSigned;
+        this.isInteger = isInteger;
+        this.isRational = isRational;
+        this.defaultDocValues = defaultDocValues;
     }
 
-    JDBCType sqlType();
-    boolean hasDocValues();
-
-    default Object defaultValue() {
-        return null;
+    DataType(JDBCType jdbcType, int size, int defaultPrecision, int displaySize) {
+        this(jdbcType, size, defaultPrecision, displaySize, false, false, false, true);
     }
 
-    default int size() {
-        return JdbcUtils.size(sqlType());
+    public String sqlName() {
+        return jdbcType.getName();
     }
 
-    default int precision() {
-        return JdbcUtils.precision(sqlType());
+    public boolean isNumeric() {
+        return isInteger || isRational;
     }
 
-    default int scale() {
-        return JdbcUtils.scale(sqlType());
+    public boolean isString() {
+        return this == KEYWORD || this == TEXT;
     }
 
-    default int displaySize() {
-        return JdbcUtils.displaySize(sqlType());
+    public boolean isPrimitive() {
+        return this != OBJECT && this != NESTED;
     }
 
-    default boolean isSigned() {
-        return JdbcUtils.isSigned(sqlType());
-    }
-
-    default boolean isInteger() {
-        return JdbcUtils.isInteger(sqlType());
-    }
-
-    default boolean isRational() {
-        return JdbcUtils.isRational(sqlType());
-    }
-
-    default boolean isNumeric() {
-        return isInteger() || isRational();
-    }
-
-    boolean isPrimitive();
-
-    default boolean same(DataType other) {
-        return getClass() == other.getClass();
-    }
 }
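
A quick usage sketch (assuming the refactored org.elasticsearch.xpack.sql.type.DataType enum above is on the classpath): the metadata that previously lived behind DataType subclasses is now plain enum state.

    import org.elasticsearch.xpack.sql.type.DataType;

    public class DataTypeSketch {
        public static void main(String[] args) {
            DataType keyword = DataType.KEYWORD;
            System.out.println(keyword.esType);        // "keyword"
            System.out.println(keyword.jdbcType);      // VARCHAR
            System.out.println(keyword.sqlName());     // "VARCHAR"
            System.out.println(keyword.isString());    // true

            DataType date = DataType.DATE;
            System.out.println(date.defaultPrecision); // 19
            System.out.println(date.isPrimitive());    // true
        }
    }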
@@ -25,7 +25,7 @@ public abstract class DataTypeConversion {
     private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateTimeNoMillis().withZoneUTC();
 
     public static DataType commonType(DataType left, DataType right) {
-        if (left.same(right)) {
+        if (left == right) {
             return left;
         }
         if (DataTypes.isNull(left)) {
@@ -36,27 +36,27 @@ public abstract class DataTypeConversion {
         }
         if (left.isNumeric() && right.isNumeric()) {
             // if one is int
-            if (left.isInteger()) {
+            if (left.isInteger) {
                 // promote the highest int
-                if (right.isInteger()) {
-                    return left.size() > right.size() ? left : right;
+                if (right.isInteger) {
+                    return left.size > right.size ? left : right;
                 }
                 // promote the rational
                 return right;
             }
             // try the other side
-            if (right.isInteger()) {
+            if (right.isInteger) {
                 return left;
             }
             // promote the highest rational
-            return left.size() > right.size() ? left : right;
+            return left.size > right.size ? left : right;
         }
-        if (left instanceof StringType) {
+        if (left.isString()) {
             if (right.isNumeric()) {
                 return right;
             }
         }
-        if (right instanceof StringType) {
+        if (right.isString()) {
             if (left.isNumeric()) {
                 return left;
             }
@@ -74,25 +74,27 @@ public abstract class DataTypeConversion {
         if (from.getClass() == to.getClass()) {
             return true;
         }
-        if (from instanceof NullType) {
+        if (from == DataType.NULL) {
            return true;
        }
 
        // anything can be converted to String
-        if (to instanceof StringType) {
+        if (to.isString()) {
            return true;
        }
 
        // also anything can be converted into a bool
-        if (to instanceof BooleanType) {
+        if (to == DataType.BOOLEAN) {
            return true;
        }
 
        // numeric conversion
-        if ((from instanceof StringType || from instanceof BooleanType || from instanceof DateType || from.isNumeric()) && to.isNumeric()) {
+        if ((from.isString() || from == DataType.BOOLEAN || from == DataType.DATE || from.isNumeric()) && to.isNumeric()) {
            return true;
        }
 
        // date conversion
-        if ((from instanceof DateType || from instanceof StringType || from.isNumeric()) && to instanceof DateType) {
+        if ((from == DataType.DATE || from.isString() || from.isNumeric()) && to == DataType.DATE) {
            return true;
        }
@@ -103,150 +105,145 @@ public abstract class DataTypeConversion {
     * Get the conversion from one type to another.
     */
    public static Conversion conversionFor(DataType from, DataType to) {
-        if (to instanceof StringType) {
-            return conversionToString(from);
+        switch (to) {
+            case KEYWORD:
+            case TEXT:
+                return conversionToString(from);
+            case LONG:
+                return conversionToLong(from);
+            case INTEGER:
+                return conversionToInt(from);
+            case SHORT:
+                return conversionToShort(from);
+            case BYTE:
+                return conversionToByte(from);
+            case FLOAT:
+                return conversionToFloat(from);
+            case DOUBLE:
+                return conversionToDouble(from);
+            case DATE:
+                return conversionToDate(from);
+            case BOOLEAN:
+                return conversionToBoolean(from);
+            default:
+                throw new SqlIllegalArgumentException("cannot convert from [" + from + "] to [" + to + "]");
        }
-        if (to instanceof LongType) {
-            return conversionToLong(from);
-        }
-        if (to instanceof IntegerType) {
-            return conversionToInt(from);
-        }
-        if (to instanceof ShortType) {
-            return conversionToShort(from);
-        }
-        if (to instanceof ByteType) {
-            return conversionToByte(from);
-        }
-        if (to instanceof FloatType) {
-            return conversionToFloat(from);
-        }
-        if (to instanceof DoubleType) {
-            return conversionToDouble(from);
-        }
-        if (to instanceof DateType) {
-            return conversionToDate(from);
-        }
-        if (to instanceof BooleanType) {
-            return conversionToBoolean(from);
-        }
-        throw new SqlIllegalArgumentException("cannot convert from [" + from + "] to [" + to + "]");
    }
 
    private static Conversion conversionToString(DataType from) {
-        if (from instanceof DateType) {
+        if (from == DataType.DATE) {
            return Conversion.DATE_TO_STRING;
        }
        return Conversion.OTHER_TO_STRING;
    }
 
    private static Conversion conversionToLong(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_LONG;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_LONG;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_INT; // We emit an int here which is ok because of Java's casting rules
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_LONG;
        }
        throw new SqlIllegalArgumentException("cannot convert from [" + from + "] to [Long]");
    }
 
    private static Conversion conversionToInt(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_INT;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_INT;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_INT;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_INT;
        }
        throw new SqlIllegalArgumentException("cannot convert from [" + from + "] to [Integer]");
    }
 
    private static Conversion conversionToShort(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_SHORT;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_SHORT;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_SHORT;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_SHORT;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Short]");
    }
 
    private static Conversion conversionToByte(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_BYTE;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_BYTE;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_BYTE;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_BYTE;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Byte]");
    }
 
    private static Conversion conversionToFloat(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_FLOAT;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_FLOAT;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_FLOAT;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_FLOAT;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Float]");
    }
 
    private static Conversion conversionToDouble(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_DOUBLE;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_DOUBLE;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_DOUBLE;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_DOUBLE;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Double]");
    }
 
    private static Conversion conversionToDate(DataType from) {
-        if (from.isRational()) {
+        if (from.isRational) {
            return Conversion.RATIONAL_TO_LONG;
        }
-        if (from.isInteger()) {
+        if (from.isInteger) {
            return Conversion.INTEGER_TO_LONG;
        }
-        if (from instanceof BooleanType) {
+        if (from == DataType.BOOLEAN) {
            return Conversion.BOOL_TO_INT; // We emit an int here which is ok because of Java's casting rules
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_DATE;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Date]");
@@ -256,7 +253,7 @@ public abstract class DataTypeConversion {
        if (from.isNumeric()) {
            return Conversion.NUMERIC_TO_BOOLEAN;
        }
-        if (from instanceof StringType) {
+        if (from.isString()) {
            return Conversion.STRING_TO_BOOLEAN;
        }
        throw new SqlIllegalArgumentException("cannot convert [" + from + "] to [Boolean]");
@@ -297,12 +294,10 @@ public abstract class DataTypeConversion {
        }
        return Booleans.parseBoolean(lowVal);
    }
 
    public static Object convert(Object value, DataType dataType) {
        DataType detectedType = DataTypes.fromJava(value);
-        if (detectedType.equals(dataType)) {
-            return value;
-        }
-        if (value == null) {
+        if (detectedType.equals(dataType) || value == null) {
            return value;
        }
        return conversionFor(detectedType, dataType).convert(value);
@@ -337,12 +332,11 @@ public abstract class DataTypeConversion {
        STRING_TO_FLOAT(fromString(Float::valueOf, "Float")),
        RATIONAL_TO_DOUBLE(fromDouble(value -> value)),
        INTEGER_TO_DOUBLE(fromLong(Double::valueOf)),
-        BOOL_TO_DOUBLE(fromBool(value -> value ? 1d: 0d)),
+        BOOL_TO_DOUBLE(fromBool(value -> value ? 1d : 0d)),
        STRING_TO_DOUBLE(fromString(Double::valueOf, "Double")),
        STRING_TO_DATE(fromString(UTC_DATE_FORMATTER::parseMillis, "Date")),
        NUMERIC_TO_BOOLEAN(fromLong(value -> value != 0)),
-        STRING_TO_BOOLEAN(fromString(DataTypeConversion::convertToBoolean, "Boolean")),
-        ;
+        STRING_TO_BOOLEAN(fromString(DataTypeConversion::convertToBoolean, "Boolean")),;
 
        private final Function<Object, Object> converter;
 
@@ -387,6 +381,6 @@ public abstract class DataTypeConversion {
            return dataType;
        }
 
-        return dataType.isInteger() ? dataType : DataTypes.LONG;
+        return dataType.isInteger ? dataType : DataType.LONG;
    }
 }
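
For orientation, a hedged sketch of how the refactored conversion entry points behave; the method names are taken from the hunks above, while the imports and wrapper class are assumed for the example.

    import org.elasticsearch.xpack.sql.type.DataType;
    import org.elasticsearch.xpack.sql.type.DataTypeConversion;

    public class ConversionSketch {
        public static void main(String[] args) {
            // numeric promotion: integer + rational -> the rational type
            DataType common = DataTypeConversion.commonType(DataType.INTEGER, DataType.DOUBLE);
            System.out.println(common); // DOUBLE

            // a String value is detected as KEYWORD and routed through the STRING_TO_LONG conversion
            Object converted = DataTypeConversion.convert("42", DataType.LONG);
            System.out.println(converted); // 42
        }
    }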
@@ -8,119 +8,47 @@ package org.elasticsearch.xpack.sql.type;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.joda.time.DateTime;
 
-import java.sql.JDBCType;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 public abstract class DataTypes {
 
-    public static final DataType NULL = new NullType();
-    public static final DataType BOOLEAN = new BooleanType(true);
-    public static final DataType BYTE = new ByteType(true);
-    public static final DataType SHORT = new ShortType(true);
-    public static final DataType INTEGER = new IntegerType(true);
-    public static final DataType LONG = new LongType(true);
-    public static final DataType DOUBLE = new DoubleType(true);
-    public static final DataType FLOAT = new FloatType(true);
-    public static final DataType HALF_FLOAT = new HalfFloatType(true);
-    public static final DataType IP_TYPE = new IpType(true);
-    public static final DataType KEYWORD = KeywordType.DEFAULT;
-    public static final DataType TEXT = new TextType();
-
-    public static final DataType GEO_POINT = new GeoPointType();
-    public static final DataType DATE = DateType.DEFAULT;
-
-    public static final DataType BINARY = new BinaryType(true);
-
-    private static final Map<String, DataType> ES_PRIMITIVES_DEFAULT = new LinkedHashMap<>();
-    private static final Map<String, DataType> ES_PRIMITIVES_NO_DOC_VALUES = new LinkedHashMap<>();
-    private static final Map<JDBCType, DataType> JDBC_TO_TYPES = new LinkedHashMap<>();
-
-    static {
-        initDefault(NULL);
-        initDefault(BOOLEAN);
-        initDefault(BYTE);
-        initDefault(SHORT);
-        initDefault(INTEGER);
-        initDefault(LONG);
-        initDefault(DOUBLE);
-        initDefault(FLOAT);
-        initDefault(HALF_FLOAT);
-        initDefault(IP_TYPE);
-        // text and keyword are handled separately
-        initDefault(BINARY);
-
-        //init(GEO_POINT);
-
-        for (DataType type : ES_PRIMITIVES_DEFAULT.values()) {
-            JDBC_TO_TYPES.put(type.sqlType(), type);
-        }
-
-        initNoDocValues(NULL);
-        initNoDocValues(new BooleanType(false));
-        initNoDocValues(new ByteType(false));
-        initNoDocValues(new ShortType(false));
-        initNoDocValues(new IntegerType(false));
-        initNoDocValues(new LongType(false));
-        initNoDocValues(new DoubleType(false));
-        initNoDocValues(new FloatType(false));
-        initNoDocValues(new HalfFloatType(false));
-        initNoDocValues(new IpType(false));
-        initNoDocValues(new BinaryType(false));
-    }
-
-    private static void initDefault(DataType type) {
-        ES_PRIMITIVES_DEFAULT.put(type.esName(), type);
-    }
-
-    private static void initNoDocValues(DataType type) {
-        ES_PRIMITIVES_NO_DOC_VALUES.put(type.esName(), type);
-    }
-
     public static boolean isNull(DataType from) {
-        return from instanceof NullType;
+        return from == DataType.NULL;
     }
 
     public static boolean isUnsupported(DataType from) {
-        return from instanceof UnsupportedDataType;
+        return from == DataType.UNSUPPORTED;
     }
 
     public static DataType fromJava(Object value) {
         if (value == null) {
-            return NULL;
+            return DataType.NULL;
         }
         if (value instanceof Integer) {
-            return INTEGER;
+            return DataType.INTEGER;
         }
         if (value instanceof Long) {
-            return LONG;
+            return DataType.LONG;
         }
         if (value instanceof Boolean) {
-            return BOOLEAN;
+            return DataType.BOOLEAN;
         }
         if (value instanceof Double) {
-            return DOUBLE;
+            return DataType.DOUBLE;
         }
         if (value instanceof Float) {
-            return FLOAT;
+            return DataType.FLOAT;
         }
         if (value instanceof Byte) {
-            return BYTE;
+            return DataType.BYTE;
         }
         if (value instanceof Short) {
-            return SHORT;
+            return DataType.SHORT;
         }
         if (value instanceof DateTime) {
-            return DATE;
+            return DataType.DATE;
         }
         if (value instanceof String) {
-            return KEYWORD;
+            return DataType.KEYWORD;
         }
         throw new SqlIllegalArgumentException("No idea what's the DataType for %s", value.getClass());
     }
-
-    public static DataType fromEsName(String typeString, boolean docValuesEnabled) {
-        DataType t = docValuesEnabled ? ES_PRIMITIVES_DEFAULT.get(typeString) : ES_PRIMITIVES_NO_DOC_VALUES.get(typeString);
-        return t != null ? t : new UnsupportedDataType(typeString);
-    }
 }
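
DataTypes is now a thin helper over the enum. A small sketch of the surviving entry points (imports and wrapper class assumed for the example):

    import org.elasticsearch.xpack.sql.type.DataType;
    import org.elasticsearch.xpack.sql.type.DataTypes;

    public class DataTypesSketch {
        public static void main(String[] args) {
            System.out.println(DataTypes.fromJava(42));                         // INTEGER
            System.out.println(DataTypes.fromJava("a"));                        // KEYWORD
            System.out.println(DataTypes.isNull(DataType.NULL));                // true
            System.out.println(DataTypes.isUnsupported(DataType.UNSUPPORTED));  // true
        }
    }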
@@ -0,0 +1,52 @@ DateEsField (new)
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.type;
+
+import org.elasticsearch.common.util.CollectionUtils;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * SQL-related information about an index field with date type
+ */
+public class DateEsField extends EsField {
+
+    public static final List<String> DEFAULT_FORMAT = Arrays.asList("strict_date_optional_time", "epoch_millis");
+    private final List<String> formats;
+
+    public DateEsField(String name, Map<String, EsField> properties, boolean hasDocValues, String... formats) {
+        super(name, DataType.DATE, properties, hasDocValues);
+        this.formats = CollectionUtils.isEmpty(formats) ? DEFAULT_FORMAT : Arrays.asList(formats);
+    }
+
+    @Override
+    public int getPrecision() {
+        // same as Long
+        // TODO: based this on format string
+        return 19;
+    }
+
+    public List<String> getFormats() {
+        return formats;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        if (!super.equals(o)) return false;
+        DateEsField dateField = (DateEsField) o;
+        return Objects.equals(formats, dateField.formats);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), formats);
+    }
+}
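
A construction sketch for the new field metadata (field name is illustrative; an empty map marks a leaf field with no sub-properties):

    import org.elasticsearch.xpack.sql.type.DateEsField;

    import java.util.Collections;

    public class DateEsFieldSketch {
        public static void main(String[] args) {
            // no explicit formats -> falls back to strict_date_optional_time / epoch_millis
            DateEsField created = new DateEsField("created_at", Collections.emptyMap(), true);
            System.out.println(created.getFormats());   // [strict_date_optional_time, epoch_millis]
            System.out.println(created.getPrecision()); // 19, same as a long
            System.out.println(created.getDataType());  // DATE
        }
    }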
@@ -1,53 +0,0 @@ DateType (deleted)
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.type;
-
-import org.elasticsearch.common.util.CollectionUtils;
-
-import java.sql.JDBCType;
-import java.util.Arrays;
-import java.util.List;
-
-public class DateType extends AbstractDataType {
-
-    public static final List<String> DEFAULT_FORMAT = Arrays.asList("strict_date_optional_time", "epoch_millis");
-    public static final DateType DEFAULT = new DateType(true);
-
-    private final List<String> formats;
-
-    DateType(boolean docValues, String... formats) {
-        /* Since we normalize timestamps to UTC for storage and do not keep
-         * the origination zone information information we are technically
-         * `TIMESTAMP WITHOUT TIME ZONE` or just `TIMESTAMP`, or, in Oracle
-         * parlance, `TIMESTAMP WITH LOCAL TIME ZONE`.
-         * `TIMESTAMP WITH TIME ZONE` implies that we store the original
-         * time zone of the even. Confusingly, PostgreSQL's
-         * `TIMESTAMP WITH TIME ZONE` type does not store original time zone,
-         * unlike H2 and Oracle, *but* it is aware of the session's time zone
-         * so it is preferred. But it is *weird*. As bad as it feels not to
-         * be like PostgreSQL, we are going to not be like PostgreSQL here
-         * and return TIMESTAMP so we more closely conform with H2 and
-         * (shudder) Oracle. */
-        super(JDBCType.TIMESTAMP, docValues);
-        this.formats = CollectionUtils.isEmpty(formats) ? DEFAULT_FORMAT : Arrays.asList(formats);
-    }
-
-    @Override
-    public String esName() {
-        return "date";
-    }
-
-    @Override
-    public int precision() {
-        // same as Long
-        // TODO: based this on format string
-        return 19;
-    }
-
-    public List<String> formats() {
-        return formats;
-    }
-}
@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class DoubleType extends AbstractDataType {

    DoubleType(boolean docValues) {
        super(JDBCType.DOUBLE, docValues);
    }

    @Override
    public String esName() {
        return "double";
    }
}

@ -0,0 +1,103 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import org.elasticsearch.common.Nullable;

import java.util.Map;
import java.util.Objects;

/**
 * SQL-related information about an index field
 */
public class EsField {
    private final DataType esDataType;
    private final boolean hasDocValues;
    private final Map<String, EsField> properties;
    private final String name;

    public EsField(String name, DataType esDataType, Map<String, EsField> properties, boolean hasDocValues) {
        this.name = name;
        this.esDataType = esDataType;
        this.hasDocValues = hasDocValues;
        this.properties = properties;
    }

    /**
     * Returns the field path
     */
    public String getName() {
        return name;
    }

    /**
     * The field type
     */
    public DataType getDataType() {
        return esDataType;
    }

    /**
     * The field supports doc values
     */
    public boolean hasDocValues() {
        return hasDocValues;
    }

    /**
     * Returns list of properties for the nested and object fields, list of subfields if the field
     * was indexed in a few different ways, or null otherwise
     */
    @Nullable
    public Map<String, EsField> getProperties() {
        return properties;
    }

    /**
     * Returns the keyword version of this field if this field is text and it has a subfield that is
     * indexed as keyword, null if such field is not found, or the field itself in all other cases
     */
    public EsField getExactField() {
        return this;
    }

    /**
     * Returns the precision of the field
     * <p>
     * Precision is the specified column size. For numeric data, this is the maximum precision. For character
     * data, this is the length in characters. For datetime datatypes, this is the length in characters of the
     * String representation (assuming the maximum allowed defaultPrecision of the fractional seconds component).
     */
    public int getPrecision() {
        return esDataType.defaultPrecision;
    }

    /**
     * True if this field name can be used in sorting, aggregations and term queries as is
     * <p>
     * This will be true for most fields except analyzed text fields that cannot be used directly and should be
     * replaced with the field returned by {@link EsField#getExactField()} instead.
     */
    public boolean isExact() {
        return true;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        EsField field = (EsField) o;
        return hasDocValues == field.hasDocValues &&
                esDataType == field.esDataType &&
                Objects.equals(properties, field.properties) &&
                Objects.equals(name, field.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(esDataType, hasDocValues, properties, name);
    }
}

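A minimal usage sketch of the metadata the new EsField class carries (not part of the commit; the field name and values are invented for illustration):

import java.util.Collections;

import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.EsField;

class EsFieldMetadataDemo {
    public static void main(String[] args) {
        // a plain numeric field with doc values, as a "long" mapping would produce
        EsField balance = new EsField("balance", DataType.LONG, Collections.emptyMap(), true);
        System.out.println(balance.getName());       // balance
        System.out.println(balance.getDataType());   // LONG
        System.out.println(balance.hasDocValues());  // true
        System.out.println(balance.getPrecision());  // DataType.LONG.defaultPrecision
        System.out.println(balance.isExact());       // true - usable as is in sorts, aggs and term queries
    }
}
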
@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class FloatType extends AbstractDataType {

    FloatType(boolean docValues) {
        super(JDBCType.REAL, docValues);
    }

    @Override
    public String esName() {
        return "float";
    }
}

@ -1,25 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class GeoPointType extends AbstractDataType {

    GeoPointType() {
        super(JDBCType.NULL, false);
    }

    @Override
    public String esName() {
        return "geo-point";
    }

    @Override
    public JDBCType sqlType() {
        throw new UnsupportedOperationException("need to determine actual format");
    }
}

@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class HalfFloatType extends AbstractDataType {

    HalfFloatType(boolean docValues) {
        super(JDBCType.FLOAT, docValues);
    }

    @Override
    public String esName() {
        return "half_float";
    }
}

@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class IntegerType extends AbstractDataType {

    IntegerType(boolean docValues) {
        super(JDBCType.INTEGER, docValues);
    }

    @Override
    public String esName() {
        return "integer";
    }
}

@ -1,26 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class IpType extends AbstractDataType {

    IpType(boolean docValues) {
        super(JDBCType.VARCHAR, docValues);
    }

    @Override
    public String esName() {
        return "ip";
    }

    @Override
    public int precision() {
        // maximum address in IPv6
        return 39;
    }
}

@ -1,122 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

abstract class JdbcUtils {

    static boolean isSigned(JDBCType type) {
        switch (type) {
            case BIGINT:
            case DECIMAL:
            case DOUBLE:
            case INTEGER:
            case SMALLINT:
            case FLOAT:
            case REAL:
            case NUMERIC:
            case TINYINT:
                return true;
            default:
                return false;
        }
    }

    static int scale(JDBCType type) {
        switch (type) {
            case REAL: return 7;
            case FLOAT:
            case DOUBLE: return 16;
            default: return 0;
        }
    }

    static int precision(JDBCType type) {
        switch (type) {
            case NULL: return 0;
            case BOOLEAN: return 1;
            case TINYINT: return 3;
            case SMALLINT: return 5;
            case INTEGER: return 10;
            case BIGINT: return 19;
            // 24 bits precision - 24*log10(2) =~ 7 (7.22)
            case REAL: return 7;
            // 53 bits precision ~ 16(15.95) decimal digits (53*log10(2))
            case FLOAT:
            case DOUBLE: return 16;
            case VARBINARY:
            case VARCHAR:
                return Integer.MAX_VALUE;
            case TIME_WITH_TIMEZONE: return displaySize(type);
            default:
                return -1;
        }
    }

    static int displaySize(JDBCType type) {
        switch (type) {
            case NULL: return 0;
            case BOOLEAN: return 1;
            case TINYINT: return 3;
            case SMALLINT: return 6;
            case INTEGER: return 11;
            case BIGINT: return 20;
            case REAL: return 15;
            case FLOAT:
            case DOUBLE: return 25;
            case VARCHAR:
            case VARBINARY: return 0;
            case TIMESTAMP: return 20;
            default:
                return 0;
        }
    }

    static boolean isRational(JDBCType type) {
        switch (type) {
            case REAL:
            case DOUBLE:
            case FLOAT:
            case DECIMAL:
            case NUMERIC:
                return true;
            default:
                return false;
        }
    }

    static boolean isInteger(JDBCType type) {
        switch (type) {
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
                return true;
            default:
                return false;
        }
    }

    static int size(JDBCType type) {
        switch (type) {
            case NULL: return 0;
            case BOOLEAN: return 1;
            case TINYINT: return Byte.BYTES;
            case SMALLINT: return Short.BYTES;
            case INTEGER: return Integer.BYTES;
            case TIMESTAMP:
            case BIGINT: return Long.BYTES;
            case REAL: return Float.BYTES;
            case FLOAT:
            case DOUBLE: return Double.BYTES;
            case VARCHAR:
            case VARBINARY: return Integer.MAX_VALUE;
            default:
                return -1;
        }
    }
}

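A quick check of the 7 and 16 digit figures used in precision(JDBCType) above; this is only a sketch verifying the arithmetic from the comments, not code from the commit:

class MantissaDigits {
    public static void main(String[] args) {
        // a float (REAL) has a 24-bit mantissa, a double a 53-bit one
        System.out.println(24 * Math.log10(2));   // ~7.22  -> about 7 reliable decimal digits
        System.out.println(53 * Math.log10(2));   // ~15.95 -> about 16 reliable decimal digits
    }
}
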
@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.util.Collections;
import java.util.Map;
import java.util.Objects;

/**
 * SQL-related information about an index field with keyword type
 */
public class KeywordEsField extends EsField {

    private final int precision;
    private final boolean normalized;

    public KeywordEsField(String name) {
        this(name, Collections.emptyMap(), true, DataType.KEYWORD.defaultPrecision, false);
    }

    public KeywordEsField(String name, Map<String, EsField> properties, boolean hasDocValues, int precision, boolean normalized) {
        super(name, DataType.KEYWORD, properties, hasDocValues);
        this.precision = precision;
        this.normalized = normalized;
    }

    @Override
    public int getPrecision() {
        return precision;
    }

    @Override
    public boolean isExact() {
        return normalized == false;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;
        KeywordEsField that = (KeywordEsField) o;
        return precision == that.precision &&
                normalized == that.normalized;
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), precision, normalized);
    }
}

@ -1,62 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;

public class KeywordType extends StringType {

    static final int DEFAULT_LENGTH = 256;
    static final boolean DEFAULT_NORMALIZED = false;
    static final KeywordType DEFAULT = new KeywordType(true, DEFAULT_LENGTH, DEFAULT_NORMALIZED, emptyMap());

    private final int length;
    private final boolean normalized;

    KeywordType(boolean docValues, int length, boolean normalized, Map<String, DataType> fields) {
        super(docValues, fields);
        this.length = length;
        this.normalized = normalized;
    }

    @Override
    public boolean isInexact() {
        return normalized;
    }

    public boolean isNormalized() {
        return normalized;
    }

    @Override
    public String esName() {
        return "keyword";
    }

    @Override
    public int precision() {
        return length;
    }

    @Override
    public int hashCode() {
        return Objects.hash(length, hasDocValues(), fields());
    }

    @Override
    public boolean equals(Object obj) {
        return super.equals(obj) && length == ((KeywordType) obj).length;
    }

    static DataType from(boolean docValues, int length, boolean normalized, Map<String, DataType> fields) {
        return docValues && length == DEFAULT_LENGTH && fields.isEmpty() && normalized == DEFAULT_NORMALIZED
                ? DEFAULT
                : new KeywordType(docValues, length, normalized, fields);
    }
}

@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class LongType extends AbstractDataType {

    LongType(boolean docValues) {
        super(JDBCType.BIGINT, docValues);
    }

    @Override
    public String esName() {
        return "long";
    }
}

@ -1,26 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.Map;

public class NestedType extends CompoundDataType {

    public NestedType(Map<String, DataType> properties) {
        super(JDBCType.STRUCT, false, properties);
    }

    @Override
    public String esName() {
        return "nested";
    }

    @Override
    public String toString() {
        return "N" + properties();
    }
}

@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class NullType extends AbstractDataType {

    NullType() {
        super(JDBCType.NULL, false);
    }

    @Override
    public String esName() {
        return "null";
    }
}

@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.Map;

import static java.util.Collections.emptyMap;

public class ObjectType extends CompoundDataType {

    public static final ObjectType EMPTY = new ObjectType(emptyMap());

    ObjectType(Map<String, DataType> properties) {
        super(JDBCType.STRUCT, false, properties);
    }

    @Override
    public String esName() {
        return "object";
    }

    @Override
    public String toString() {
        return "O" + properties();
    }
}

@ -1,26 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class ScaledFloatType extends AbstractDataType {

    public ScaledFloatType(boolean docValues) {
        super(JDBCType.FLOAT, docValues);
    }

    @Override
    public String esName() {
        return "scaled_float";
    }

    @Override
    public int precision() {
        // just like long
        return 19;
    }
}

@ -120,7 +120,7 @@ public class Schema implements Iterable<Entry> {
             }
             sb.append(names.get(i));
             sb.append(":");
-            sb.append(types.get(i).esName());
+            sb.append(types.get(i).esType);
         }
         sb.append("]");
         return sb.toString();

@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class ShortType extends AbstractDataType {

    ShortType(boolean docValues) {
        super(JDBCType.SMALLINT, docValues);
    }

    @Override
    public String esName() {
        return "short";
    }
}

@ -1,86 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;

import static java.util.Collections.emptyMap;

// String type is a special type of CompoundDataType
public abstract class StringType extends CompoundDataType {

    private final boolean docValue;
    private final Map<String, DataType> fields;
    private final Map<String, KeywordType> exactKeywords;

    StringType(boolean docValue, Map<String, DataType> fields) {
        super(JDBCType.VARCHAR, docValue, fields);

        this.docValue = docValue;
        this.fields = fields;

        if (docValue || fields.isEmpty()) {
            exactKeywords = emptyMap();
        } else {
            exactKeywords = new LinkedHashMap<>();
            for (Entry<String, DataType> entry : fields.entrySet()) {
                DataType t = entry.getValue();
                // consider only non-normalized keywords
                if (t instanceof KeywordType) {
                    KeywordType kt = (KeywordType) t;
                    if (!kt.isNormalized()) {
                        exactKeywords.put(entry.getKey(), kt);
                    }
                }
            }
        }
    }

    public abstract boolean isInexact();

    public Map<String, DataType> fields() {
        return properties();
    }

    public Map<String, KeywordType> exactKeywords() {
        return exactKeywords;
    }

    @Override
    public boolean isPrimitive() {
        return true;
    }

    @Override
    public int precision() {
        return Integer.MAX_VALUE;
    }

    @Override
    public int hashCode() {
        return Objects.hash(docValue, fields);
    }

    @Override
    public boolean equals(Object obj) {
        if (super.equals(obj)) {
            StringType other = (StringType) obj;
            return Objects.equals(docValue, other.docValue)
                    && Objects.equals(fields(), other.fields());
        }
        return false;
    }

    @Override
    public String toString() {
        return esName();
    }
}

@ -0,0 +1,44 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import org.elasticsearch.xpack.sql.analysis.index.MappingException;

import java.util.Map;

/**
 * SQL-related information about an index field with text type
 */
public class TextEsField extends EsField {

    public TextEsField(String name, Map<String, EsField> properties, boolean hasDocValues) {
        super(name, DataType.TEXT, properties, hasDocValues);
    }

    @Override
    public EsField getExactField() {
        EsField field = null;
        for (EsField property : getProperties().values()) {
            if (property.getDataType() == DataType.KEYWORD && property.isExact()) {
                if (field != null) {
                    throw new MappingException("Multiple exact keyword candidates available for [" + getName() +
                            "]; specify which one to use");
                }
                field = property;
            }
        }
        if (field == null) {
            throw new MappingException("No keyword/multi-field defined exact matches for [" + getName() +
                    "]; define one or use MATCH/QUERY instead");
        }
        return field;
    }

    @Override
    public boolean isExact() {
        return false;
    }
}

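A hedged sketch (not part of the commit; the field names are invented) of how an analyzed text field with a keyword multi-field resolves to its exact, aggregatable counterpart through the classes above:

import java.util.LinkedHashMap;
import java.util.Map;

import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.KeywordEsField;
import org.elasticsearch.xpack.sql.type.TextEsField;

class ExactFieldDemo {
    public static void main(String[] args) {
        Map<String, EsField> subFields = new LinkedHashMap<>();
        subFields.put("raw", new KeywordEsField("raw"));          // non-normalized keyword -> exact
        TextEsField fullName = new TextEsField("full_name", subFields, false);

        EsField exact = fullName.getExactField();
        System.out.println(exact.getName());   // raw
        System.out.println(exact.isExact());   // true
        // with zero or more than one exact keyword sub-field, getExactField() throws MappingException
    }
}
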
@ -1,56 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;

public class TextType extends StringType {

    private static final TextType DEFAULT = new TextType(false, singletonMap("keyword", KeywordType.DEFAULT));

    private final boolean fieldData;

    TextType() {
        this(false, emptyMap());
    }

    TextType(boolean fieldData, Map<String, DataType> fields) {
        super(false, fields);
        this.fieldData = fieldData;
    }

    public boolean hasFieldData() {
        return fieldData;
    }

    @Override
    public boolean isInexact() {
        return true;
    }

    @Override
    public String esName() {
        return "text";
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), fieldData);
    }

    @Override
    public boolean equals(Object obj) {
        return super.equals(obj) && fieldData == ((TextType) obj).fieldData;
    }

    static DataType from(boolean fieldData, Map<String, DataType> fields) {
        return DEFAULT.fieldData == fieldData && DEFAULT.fields().equals(fields) ? DEFAULT : new TextType(fieldData, fields);
    }
}

@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;

public class TokenCountType extends AbstractDataType {

    TokenCountType(boolean docValues) {
        super(JDBCType.INTEGER, docValues);
    }

    @Override
    public String esName() {
        return "token_count";
    }

    @Override
    public boolean isInteger() {
        return false;
    }

    @Override
    public boolean isRational() {
        return false;
    }
}

@ -7,47 +7,19 @@ package org.elasticsearch.xpack.sql.type;

 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.xpack.sql.analysis.index.MappingException;

-import java.util.HashSet;
+import java.util.Collections;
 import java.util.LinkedHashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;

-import static java.lang.Math.floor;
-import static java.lang.Math.log10;
-import static java.lang.Math.round;
 import static java.util.Collections.emptyMap;
-import static java.util.Collections.unmodifiableSet;

 public abstract class Types {

-    private static final Set<String> KNOWN_TYPES;
-
-    static {
-        Set<String> types = new HashSet<>();
-        types.add("text");
-        types.add("keyword");
-        types.add("long");
-        types.add("integer");
-        types.add("short");
-        types.add("byte");
-        types.add("double");
-        types.add("float");
-        types.add("half_float");
-        types.add("scaled_float");
-        types.add("date");
-        types.add("boolean");
-        types.add("binary");
-        types.add("object");
-        types.add("nested");
-
-        KNOWN_TYPES = unmodifiableSet(types);
-    }
-
     @SuppressWarnings("unchecked")
-    public static Map<String, DataType> fromEs(Map<String, Object> asMap) {
+    public static Map<String, EsField> fromEs(Map<String, Object> asMap) {
         Map<String, Object> props = null;
         if (asMap != null && !asMap.isEmpty()) {
             props = (Map<String, Object>) asMap.get("properties");

@ -55,8 +27,8 @@ public abstract class Types {
         return props == null || props.isEmpty() ? emptyMap() : startWalking(props);
     }

-    private static Map<String, DataType> startWalking(Map<String, Object> mapping) {
-        Map<String, DataType> types = new LinkedHashMap<>();
+    private static Map<String, EsField> startWalking(Map<String, Object> mapping) {
+        Map<String, EsField> types = new LinkedHashMap<>();

         if (mapping == null) {
             return emptyMap();

@ -68,87 +40,74 @@ public abstract class Types {
         return types;
     }

+    private static DataType getType(Map<String, Object> content) {
+        if (content.containsKey("type")) {
+            try {
+                return DataType.valueOf(content.get("type").toString().toUpperCase(Locale.ROOT));
+            } catch (IllegalArgumentException ex) {
+                return DataType.UNSUPPORTED;
+            }
+        } else if (content.containsKey("properties")) {
+            return DataType.OBJECT;
+        } else {
+            return DataType.UNSUPPORTED;
+        }
+    }
+
     @SuppressWarnings("unchecked")
-    private static void walkMapping(String name, Object value, Map<String, DataType> mapping) {
+    private static void walkMapping(String name, Object value, Map<String, EsField> mapping) {
         // object type - only root or nested docs supported
         if (value instanceof Map) {
             Map<String, Object> content = (Map<String, Object>) value;

             // extract field type
-            Object type = content.get("type");
-            if (type instanceof String) {
-                String st = type.toString();
-
-                if (knownType(st)) {
-                    if (isNested(st)) {
-                        mapping.put(name, new NestedType(fromEs(content)));
-                    } else {
-                        // check dates first to account for the format
-                        DataType primitiveType = createPrimitiveType(st, content);
-                        if (primitiveType != null) {
-                            mapping.put(name, primitiveType);
-                        }
-                    }
-                } else {
-                    mapping.put(name, new UnsupportedDataType(st));
-                }
-            }
-            // object type ?
-            else if (type == null && content.containsKey("properties")) {
-                mapping.put(name, new ObjectType(fromEs(content)));
-            }
-            // bail out
-            else {
-                throw new MappingException("Unsupported mapping %s", type);
-            }
+            DataType esDataType = getType(content);
+            final Map<String, EsField> properties;
+            if (esDataType == DataType.OBJECT || esDataType == DataType.NESTED) {
+                properties = fromEs(content);
+            } else if (content.containsKey("fields")) {
+                // Check for multifields
+                Object fields = content.get("fields");
+                if (fields instanceof Map) {
+                    properties = startWalking((Map<String, Object>) fields);
+                } else {
+                    properties = Collections.emptyMap();
+                }
+            } else {
+                properties = Collections.emptyMap();
+            }
+            boolean docValues = boolSetting(content.get("doc_values"), esDataType.defaultDocValues);
+            final EsField field;
+            switch (esDataType) {
+                case TEXT:
+                    field = new TextEsField(name, properties, docValues);
+                    break;
+                case KEYWORD:
+                    int length = intSetting(content.get("ignore_above"), esDataType.defaultPrecision);
+                    boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null));
+                    field = new KeywordEsField(name, properties, docValues, length, normalized);
+                    break;
+                case DATE:
+                    Object fmt = content.get("format");
+                    if (fmt != null) {
+                        field = new DateEsField(name, properties, docValues, Strings.delimitedListToStringArray(fmt.toString(), "||"));
+                    } else {
+                        field = new DateEsField(name, properties, docValues);
+                    }
+                    break;
+                case UNSUPPORTED:
+                    String type = content.get("type").toString();
+                    field = new UnsupportedEsField(name, type);
+                    break;
+                default:
+                    field = new EsField(name, esDataType, properties, docValues);
+            }
+            mapping.put(name, field);
         } else {
-            throw new MappingException("Unrecognized mapping %s", value);
+            throw new IllegalArgumentException("Unrecognized mapping " + value);
         }
     }

-    @SuppressWarnings("unchecked")
-    private static DataType createPrimitiveType(String typeString, Map<String, Object> content) {
-        // since this setting is available in most types, search for it regardless
-
-        DataType type = null;
-
-        boolean docValues = boolSetting(content.get("doc_values"), true);
-        switch (typeString) {
-            case "date":
-                Object fmt = content.get("format");
-                if (fmt != null) {
-                    type = new DateType(docValues, Strings.delimitedListToStringArray(fmt.toString(), "||"));
-                }
-                else {
-                    type = docValues ? DateType.DEFAULT : new DateType(false);
-                }
-                break;
-            case "text":
-                boolean fieldData = boolSetting(content.get("fielddata"), false);
-                Object value = content.get("fields");
-                Map<String, DataType> fields = emptyMap();
-                if (value instanceof Map) {
-                    fields = startWalking((Map<String, Object>) value);
-                }
-                type = TextType.from(fieldData, fields);
-                break;
-            case "keyword":
-                int length = intSetting(content.get("ignore_above"), KeywordType.DEFAULT_LENGTH);
-                boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null));
-                fields = emptyMap();
-                value = content.get("fields");
-                if (value instanceof Map) {
-                    fields = startWalking((Map<String, Object>) value);
-                }
-                type = KeywordType.from(docValues, length, normalized, fields);
-                break;
-            default:
-                type = DataTypes.fromEsName(typeString, docValues);
-        }
-
-        return type;
-    }
-
     private static String textSetting(Object value, String defaultValue) {
         return value == null ? defaultValue : value.toString();
     }

@ -160,17 +119,4 @@ public abstract class Types {
     private static int intSetting(Object value, int defaultValue) {
         return value == null ? defaultValue : Integer.parseInt(value.toString());
     }
-
-    private static boolean knownType(String st) {
-        return KNOWN_TYPES.contains(st);
-    }
-
-    private static boolean isNested(String type) {
-        return "nested".equals(type);
-    }
-
-    static int precision(long number) {
-        long abs = number == Long.MIN_VALUE ? Long.MAX_VALUE : number < 0 ? -number : number;
-        return (int) round(floor(log10(abs))) + 1;
-    }
 }

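A hedged sketch (not in the commit; the mapping below is invented) of what the rewritten Types.fromEs produces for a text field carrying a keyword multi-field with an ignore_above setting:

import java.util.LinkedHashMap;
import java.util.Map;

import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.Types;

class TypesDemo {
    public static void main(String[] args) {
        Map<String, Object> keyword = new LinkedHashMap<>();
        keyword.put("type", "keyword");
        keyword.put("ignore_above", 512);

        Map<String, Object> fields = new LinkedHashMap<>();
        fields.put("raw", keyword);

        Map<String, Object> name = new LinkedHashMap<>();
        name.put("type", "text");
        name.put("fields", fields);

        Map<String, Object> properties = new LinkedHashMap<>();
        properties.put("name", name);

        Map<String, Object> mapping = new LinkedHashMap<>();
        mapping.put("properties", properties);

        Map<String, EsField> parsed = Types.fromEs(mapping);
        EsField nameField = parsed.get("name");                 // a TextEsField
        EsField raw = nameField.getProperties().get("raw");     // a KeywordEsField honoring ignore_above
        System.out.println(raw.getPrecision());                 // 512
    }
}
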
@ -1,61 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import org.elasticsearch.xpack.sql.analysis.index.MappingException;

import java.sql.JDBCType;
import java.util.Objects;

public class UnsupportedDataType extends AbstractDataType {

    private final String esType;

    UnsupportedDataType(String esType) {
        super(JDBCType.OTHER, false);
        this.esType = esType;
    }

    @Override
    public String sqlName() {
        return "UNSUPPORTED";
    }

    @Override
    public Object defaultValue() {
        throw new MappingException("Unsupported Elasticsearch type " + esType);
    }

    @Override
    public boolean isInteger() {
        throw new MappingException("Unsupported Elasticsearch type " + esType);
    }

    @Override
    public boolean isRational() {
        throw new MappingException("Unsupported Elasticsearch type " + esType);
    }

    @Override
    public boolean same(DataType other) {
        return (other instanceof UnsupportedDataType) && Objects.equals(esType, ((UnsupportedDataType) other).esType);
    }

    @Override
    public String esName() {
        return esType;
    }

    @Override
    public boolean hasDocValues() {
        throw new MappingException("Unsupported Elasticsearch type " + esType);
    }

    @Override
    public boolean isPrimitive() {
        throw new MappingException("Unsupported Elasticsearch type " + esType);
    }
}

@ -0,0 +1,41 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import java.util.Collections;
import java.util.Objects;

/**
 * SQL-related information about an index field that cannot be supported by SQL
 */
public class UnsupportedEsField extends EsField {

    private String originalType;

    public UnsupportedEsField(String name, String originalType) {
        super(name, DataType.UNSUPPORTED, Collections.emptyMap(), false);
        this.originalType = originalType;
    }

    public String getOriginalType() {
        return originalType;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        if (!super.equals(o)) return false;
        UnsupportedEsField that = (UnsupportedEsField) o;
        return Objects.equals(originalType, that.originalType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), originalType);
    }
}

@ -18,16 +18,15 @@ import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.plan.logical.Project;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.KeywordType;
-import org.elasticsearch.xpack.sql.type.TextType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
 import org.joda.time.DateTimeZone;

 import java.util.List;
 import java.util.Map;

-import static org.elasticsearch.xpack.sql.type.DataTypes.BOOLEAN;
-import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD;
+import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN;
+import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.hasItems;

@ -46,7 +45,7 @@ public class FieldAttributeTests extends ESTestCase {
         parser = new SqlParser(DateTimeZone.UTC);
         functionRegistry = new FunctionRegistry();

-        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
+        Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");

         EsIndex test = new EsIndex("test", mapping);
         getIndexResult = IndexResolution.valid(test);

@ -90,30 +89,30 @@
         FieldAttribute attr = attribute("some.string");
         assertThat(attr.path(), is("some"));
         assertThat(attr.name(), is("some.string"));
-        assertThat(attr.dataType(), instanceOf(TextType.class));
+        assertThat(attr.dataType(), is(DataType.TEXT));
         assertThat(attr.isInexact(), is(true));
         FieldAttribute exact = attr.exactAttribute();
         assertThat(exact.isInexact(), is(false));
         assertThat(exact.name(), is("some.string.typical"));
-        assertThat(exact.dataType(), instanceOf(KeywordType.class));
+        assertThat(exact.dataType(), is(KEYWORD));
     }

     public void testAmbiguousExactKeyword() {
         FieldAttribute attr = attribute("some.ambiguous");
         assertThat(attr.path(), is("some"));
         assertThat(attr.name(), is("some.ambiguous"));
-        assertThat(attr.dataType(), instanceOf(TextType.class));
+        assertThat(attr.dataType(), is(DataType.TEXT));
         assertThat(attr.isInexact(), is(true));
         MappingException me = expectThrows(MappingException.class, () -> attr.exactAttribute());
         assertThat(me.getMessage(),
-                is("Multiple exact keyword candidates [one, two] available for [some.ambiguous]; specify which one to use"));
+                is("Multiple exact keyword candidates available for [ambiguous]; specify which one to use"));
     }

     public void testNormalizedKeyword() {
         FieldAttribute attr = attribute("some.string.normalized");
         assertThat(attr.path(), is("some.string"));
         assertThat(attr.name(), is("some.string.normalized"));
-        assertThat(attr.dataType(), instanceOf(KeywordType.class));
+        assertThat(attr.dataType(), is(KEYWORD));
         assertThat(attr.isInexact(), is(true));
     }

@ -11,7 +11,7 @@ import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
 import org.joda.time.DateTimeZone;

@ -21,7 +21,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
     private SqlParser parser = new SqlParser(DateTimeZone.UTC);

     private String verify(String sql) {
-        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json");
+        Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json");
         EsIndex test = new EsIndex("test", mapping);
         return verify(IndexResolution.valid(test), sql);
     }

@ -26,7 +26,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.ScoreSort;
 import org.elasticsearch.xpack.sql.querydsl.container.Sort.Direction;
 import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery;
 import org.elasticsearch.xpack.sql.tree.Location;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;

 import static java.util.Collections.emptyList;
 import static java.util.Collections.singletonList;

@ -94,12 +94,12 @@ public class SourceGeneratorTests extends ESTestCase {

     public void testSortFieldSpecified() {
         QueryContainer container = new QueryContainer()
-                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.ASC));
+                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", new KeywordEsField("test")), Direction.ASC));
         SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
         assertEquals(singletonList(fieldSort("test").order(SortOrder.ASC)), sourceBuilder.sorts());

         container = new QueryContainer()
-                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.DESC));
+                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", new KeywordEsField("test")), Direction.DESC));
         sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
         assertEquals(singletonList(fieldSort("test").order(SortOrder.DESC)), sourceBuilder.sorts());
     }

@ -70,17 +70,17 @@ public class LiteralTests extends AbstractNodeTestCase<Literal, Expression> {
      * after a generators is its "native" type.
      */
     private static final List<ValueAndCompatibleTypes> GENERATORS = Arrays.asList(
-        new ValueAndCompatibleTypes(() -> randomBoolean() ? randomBoolean() : randomFrom("true", "false"), DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomByte, DataTypes.BYTE, DataTypes.SHORT, DataTypes.INTEGER, DataTypes.LONG,
-            DataTypes.FLOAT, DataTypes.DOUBLE, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomShort, DataTypes.SHORT, DataTypes.INTEGER, DataTypes.LONG,
-            DataTypes.FLOAT, DataTypes.DOUBLE, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomInt, DataTypes.INTEGER, DataTypes.LONG,
-            DataTypes.FLOAT, DataTypes.DOUBLE, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomLong, DataTypes.LONG, DataTypes.FLOAT, DataTypes.DOUBLE, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomFloat, DataTypes.FLOAT, DataTypes.LONG, DataTypes.DOUBLE, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(ESTestCase::randomDouble, DataTypes.DOUBLE, DataTypes.LONG, DataTypes.FLOAT, DataTypes.BOOLEAN),
-        new ValueAndCompatibleTypes(() -> randomAlphaOfLength(5), DataTypes.KEYWORD));
+        new ValueAndCompatibleTypes(() -> randomBoolean() ? randomBoolean() : randomFrom("true", "false"), DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomByte, DataType.BYTE, DataType.SHORT, DataType.INTEGER, DataType.LONG,
+            DataType.FLOAT, DataType.DOUBLE, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomShort, DataType.SHORT, DataType.INTEGER, DataType.LONG,
+            DataType.FLOAT, DataType.DOUBLE, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomInt, DataType.INTEGER, DataType.LONG,
+            DataType.FLOAT, DataType.DOUBLE, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomLong, DataType.LONG, DataType.FLOAT, DataType.DOUBLE, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomFloat, DataType.FLOAT, DataType.LONG, DataType.DOUBLE, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(ESTestCase::randomDouble, DataType.DOUBLE, DataType.LONG, DataType.FLOAT, DataType.BOOLEAN),
+        new ValueAndCompatibleTypes(() -> randomAlphaOfLength(5), DataType.KEYWORD));

     public static Literal randomLiteral() {
         ValueAndCompatibleTypes gen = randomFrom(GENERATORS);

@ -146,8 +146,8 @@ public class LiteralTests extends AbstractNodeTestCase<Literal, Expression> {

     private List<DataType> validReplacementDataTypes(Object value, DataType type) {
         List<DataType> validDataTypes = new ArrayList<>();
-        List<DataType> options = Arrays.asList(DataTypes.BYTE, DataTypes.SHORT, DataTypes.INTEGER, DataTypes.LONG,
-            DataTypes.FLOAT, DataTypes.DOUBLE, DataTypes.BOOLEAN);
+        List<DataType> options = Arrays.asList(DataType.BYTE, DataType.SHORT, DataType.INTEGER, DataType.LONG,
+            DataType.FLOAT, DataType.DOUBLE, DataType.BOOLEAN);
         for (DataType candidate : options) {
             try {
                 DataTypeConversion.Conversion c = DataTypeConversion.conversionFor(type, candidate);

@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;

 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.expression.Literal;
-import org.elasticsearch.xpack.sql.type.DateType;
+import org.elasticsearch.xpack.sql.type.DataType;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;

@ -29,6 +29,6 @@ public class DayOfYearTests extends ESTestCase {
     }

     private DayOfYear build(Object value, DateTimeZone timeZone) {
-        return new DayOfYear(null, new Literal(null, value, DateType.DEFAULT), timeZone);
+        return new DayOfYear(null, new Literal(null, value, DataType.DATE), timeZone);
     }
 }

@@ -95,7 +95,7 @@ public class OptimizerTests extends ESTestCase {

         @Override
         public DataType dataType() {
-            return DataTypes.BOOLEAN;
+            return DataType.BOOLEAN;
         }

         @Override

@@ -264,14 +264,14 @@ public class OptimizerTests extends ESTestCase {
     }

     public void testConstantFoldingDatetime() {
-        Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataTypes.DATE);
+        Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE);
         assertEquals(2018, unwrapAlias(new ConstantFolding().rule(new Year(EMPTY, cast, DateTimeZone.UTC))));
         assertEquals(1, unwrapAlias(new ConstantFolding().rule(new MonthOfYear(EMPTY, cast, DateTimeZone.UTC))));
         assertEquals(19, unwrapAlias(new ConstantFolding().rule(new DayOfMonth(EMPTY, cast, DateTimeZone.UTC))));
         assertEquals(19, unwrapAlias(new ConstantFolding().rule(new DayOfYear(EMPTY, cast, DateTimeZone.UTC))));
         assertEquals(3, unwrapAlias(new ConstantFolding().rule(new WeekOfYear(EMPTY, cast, DateTimeZone.UTC))));
         assertNull(unwrapAlias(new ConstantFolding().rule(
-            new WeekOfYear(EMPTY, new Literal(EMPTY, null, DataTypes.NULL), DateTimeZone.UTC))));
+            new WeekOfYear(EMPTY, new Literal(EMPTY, null, DataType.NULL), DateTimeZone.UTC))));
     }

     private Object unwrapAlias(Expression e) {
@@ -19,7 +19,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Project;

 import org.elasticsearch.xpack.sql.planner.QueryTranslator.QueryTranslation;
 import org.elasticsearch.xpack.sql.querydsl.query.Query;
 import org.elasticsearch.xpack.sql.querydsl.query.TermQuery;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.type.EsField;
 import org.elasticsearch.xpack.sql.type.TypesTests;
 import org.joda.time.DateTimeZone;

@@ -36,7 +36,7 @@ public class QueryTranslatorTests extends ESTestCase {
         parser = new SqlParser(DateTimeZone.UTC);
         functionRegistry = new FunctionRegistry();

-        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
+        Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");

         EsIndex test = new EsIndex("test", mapping);
         getIndexResult = IndexResolution.valid(test);
@@ -14,9 +14,12 @@ import org.elasticsearch.xpack.sql.optimizer.Optimizer;

 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.sql.type.DataType;
-import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.elasticsearch.xpack.sql.type.EsField;
+import org.elasticsearch.xpack.sql.type.KeywordEsField;
+import org.elasticsearch.xpack.sql.type.TextEsField;
 import org.joda.time.DateTimeZone;

+import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;

@@ -27,11 +30,11 @@ public class VerifierErrorMessagesTests extends ESTestCase {
     private Planner planner = new Planner();

     private String verify(String sql) {
-        Map<String, DataType> mapping = new LinkedHashMap<>();
-        mapping.put("bool", DataTypes.BOOLEAN);
-        mapping.put("int", DataTypes.INTEGER);
-        mapping.put("text", DataTypes.TEXT);
-        mapping.put("keyword", DataTypes.KEYWORD);
+        Map<String, EsField> mapping = new LinkedHashMap<>();
+        mapping.put("bool", new EsField("bool", DataType.BOOLEAN, Collections.emptyMap(), true));
+        mapping.put("int", new EsField("int", DataType.INTEGER, Collections.emptyMap(), true));
+        mapping.put("text", new TextEsField("text", Collections.emptyMap(), true));
+        mapping.put("keyword", new KeywordEsField("keyword", Collections.emptyMap(), true, DataType.KEYWORD.defaultPrecision, true));
         EsIndex test = new EsIndex("test", mapping);
         IndexResolution getIndexResult = IndexResolution.valid(test);
         Analyzer analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, DateTimeZone.UTC);
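The verifier test now resolves against EsField instances instead of bare DataType values. A condensed sketch of that setup, kept to the constructors and calls shown in the hunk above (the boolean constructor arguments are copied verbatim from the diff; their exact semantics are not spelled out here):

    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.elasticsearch.xpack.sql.type.DataType;
    import org.elasticsearch.xpack.sql.type.EsField;
    import org.elasticsearch.xpack.sql.type.KeywordEsField;
    import org.elasticsearch.xpack.sql.type.TextEsField;

    class VerifierTestMapping {
        static Map<String, EsField> build() {
            Map<String, EsField> mapping = new LinkedHashMap<>();
            // Generic fields name their DataType explicitly and carry no sub-fields.
            mapping.put("bool", new EsField("bool", DataType.BOOLEAN, Collections.emptyMap(), true));
            mapping.put("int", new EsField("int", DataType.INTEGER, Collections.emptyMap(), true));
            // Text and keyword fields get dedicated EsField subclasses.
            mapping.put("text", new TextEsField("text", Collections.emptyMap(), true));
            mapping.put("keyword", new KeywordEsField("keyword", Collections.emptyMap(), true,
                    DataType.KEYWORD.defaultPrecision, true));
            // The test then wraps this map in an EsIndex, resolves it and hands it to the Analyzer,
            // exactly as in the hunk above.
            return mapping;
        }
    }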
@@ -13,11 +13,11 @@ import org.joda.time.DateTimeZone;

 public class DataTypeConversionTests extends ESTestCase {
     public void testConversionToString() {
-        Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), KeywordType.DEFAULT);
+        Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.KEYWORD);
         assertNull(conversion.convert(null));
         assertEquals("10.0", conversion.convert(10.0));

-        conversion = DataTypeConversion.conversionFor(new DateType(true), KeywordType.DEFAULT);
+        conversion = DataTypeConversion.conversionFor(DataType.DATE, DataType.KEYWORD);
         assertNull(conversion.convert(null));
         assertEquals("1970-01-01T00:00:00.000Z", conversion.convert(new DateTime(0, DateTimeZone.UTC)));
     }

@@ -26,9 +26,9 @@ public class DataTypeConversionTests extends ESTestCase {
      * Test conversion to a date or long. These are almost the same.
      */
     public void testConversionToLongOrDate() {
-        DataType to = randomBoolean() ? new LongType(true) : new DateType(true);
+        DataType to = randomBoolean() ? DataType.LONG : DataType.DATE;
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), to);
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, to);
             assertNull(conversion.convert(null));
             assertEquals(10L, conversion.convert(10.0));
             assertEquals(10L, conversion.convert(10.1));

@@ -37,20 +37,20 @@ public class DataTypeConversionTests extends ESTestCase {
             assertEquals("[" + Double.MAX_VALUE + "] out of [Long] range", e.getMessage());
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new IntegerType(true), to);
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.INTEGER, to);
             assertNull(conversion.convert(null));
             assertEquals(10L, conversion.convert(10));
             assertEquals(-134L, conversion.convert(-134));
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new BooleanType(true), to);
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.BOOLEAN, to);
             assertNull(conversion.convert(null));
             assertEquals(1, conversion.convert(true));
             assertEquals(0, conversion.convert(false));
         }
-        Conversion conversion = DataTypeConversion.conversionFor(KeywordType.DEFAULT, to);
+        Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, to);
         assertNull(conversion.convert(null));
-        if (to instanceof LongType) {
+        if (to == DataType.LONG) {
             assertEquals(1L, conversion.convert("1"));
             assertEquals(0L, conversion.convert("-0"));
             Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff"));

@@ -67,26 +67,26 @@ public class DataTypeConversionTests extends ESTestCase {

     public void testConversionToDouble() {
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new FloatType(true), new DoubleType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.FLOAT, DataType.DOUBLE);
             assertNull(conversion.convert(null));
             assertEquals(10.0, (double) conversion.convert(10.0f), 0.00001);
             assertEquals(10.1, (double) conversion.convert(10.1f), 0.00001);
             assertEquals(10.6, (double) conversion.convert(10.6f), 0.00001);
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new IntegerType(true), new DoubleType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.INTEGER, DataType.DOUBLE);
             assertNull(conversion.convert(null));
             assertEquals(10.0, (double) conversion.convert(10), 0.00001);
             assertEquals(-134.0, (double) conversion.convert(-134), 0.00001);
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new BooleanType(true), new DoubleType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.BOOLEAN, DataType.DOUBLE);
             assertNull(conversion.convert(null));
             assertEquals(1.0, (double) conversion.convert(true), 0);
             assertEquals(0.0, (double) conversion.convert(false), 0);
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(KeywordType.DEFAULT, new DoubleType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.DOUBLE);
             assertNull(conversion.convert(null));
             assertEquals(1.0, (double) conversion.convert("1"), 0);
             assertEquals(0.0, (double) conversion.convert("-0"), 0);

@@ -98,28 +98,28 @@ public class DataTypeConversionTests extends ESTestCase {

     public void testConversionToBoolean() {
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new FloatType(true), new BooleanType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.FLOAT, DataType.BOOLEAN);
             assertNull(conversion.convert(null));
             assertEquals(true, conversion.convert(10.0f));
             assertEquals(true, conversion.convert(-10.0f));
             assertEquals(false, conversion.convert(0.0f));
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new IntegerType(true), new BooleanType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.INTEGER, DataType.BOOLEAN);
             assertNull(conversion.convert(null));
             assertEquals(true, conversion.convert(10));
             assertEquals(true, conversion.convert(-10));
             assertEquals(false, conversion.convert(0));
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), new BooleanType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.BOOLEAN);
             assertNull(conversion.convert(null));
             assertEquals(true, conversion.convert(10.0));
             assertEquals(true, conversion.convert(-10.0));
             assertEquals(false, conversion.convert(0.0));
         }
         {
-            Conversion conversion = DataTypeConversion.conversionFor(KeywordType.DEFAULT, new BooleanType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.BOOLEAN);
             assertNull(conversion.convert(null));
             // We only handled upper and lower case true and false
             assertEquals(true, conversion.convert("true"));

@@ -144,7 +144,7 @@ public class DataTypeConversionTests extends ESTestCase {

     public void testConversionToInt() {
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), new IntegerType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.INTEGER);
             assertNull(conversion.convert(null));
             assertEquals(10, conversion.convert(10.0));
             assertEquals(10, conversion.convert(10.1));

@@ -156,7 +156,7 @@ public class DataTypeConversionTests extends ESTestCase {

     public void testConversionToShort() {
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), new ShortType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.SHORT);
             assertNull(conversion.convert(null));
             assertEquals((short) 10, conversion.convert(10.0));
             assertEquals((short) 10, conversion.convert(10.1));

@@ -168,7 +168,7 @@ public class DataTypeConversionTests extends ESTestCase {

     public void testConversionToByte() {
         {
-            Conversion conversion = DataTypeConversion.conversionFor(new DoubleType(true), new ByteType(true));
+            Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.BYTE);
             assertNull(conversion.convert(null));
             assertEquals((byte) 10, conversion.convert(10.0));
             assertEquals((byte) 10, conversion.convert(10.1));
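With the per-type classes gone, a conversion is now requested purely by naming two DataType enum constants. A small usage sketch; the behaviour noted in the comments (null pass-through, truncation, string parsing, range errors) mirrors the assertions above rather than documenting the full API:

    import org.elasticsearch.xpack.sql.type.DataType;
    import org.elasticsearch.xpack.sql.type.DataTypeConversion;
    import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;

    class ConversionSketch {
        public static void main(String[] args) {
            // Double -> Integer: nulls pass through, fractions are truncated.
            Conversion toInt = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.INTEGER);
            System.out.println(toInt.convert(null));  // null
            System.out.println(toInt.convert(10.1));  // 10

            // Keyword -> Long: numeric strings are parsed.
            Conversion toLong = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.LONG);
            System.out.println(toLong.convert("1"));  // 1

            // Out-of-range values are rejected, e.g. Double.MAX_VALUE into a Long:
            // "[1.7976931348623157E308] out of [Long] range"
            try {
                DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.LONG).convert(Double.MAX_VALUE);
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
        }
    }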
@@ -14,184 +14,182 @@ import java.util.List;
 import java.util.Map;

 import static java.util.Collections.emptyMap;
-import static org.elasticsearch.xpack.sql.type.DataTypes.DATE;
-import static org.elasticsearch.xpack.sql.type.DataTypes.INTEGER;
-import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD;
-import static org.elasticsearch.xpack.sql.type.DataTypes.TEXT;
+import static org.elasticsearch.xpack.sql.type.DataType.DATE;
+import static org.elasticsearch.xpack.sql.type.DataType.INTEGER;
+import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD;
+import static org.elasticsearch.xpack.sql.type.DataType.NESTED;
+import static org.elasticsearch.xpack.sql.type.DataType.OBJECT;
+import static org.elasticsearch.xpack.sql.type.DataType.TEXT;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;

 public class TypesTests extends ESTestCase {

-    public void testNullMap() throws Exception {
-        Map<String, DataType> fromEs = Types.fromEs(null);
+    public void testNullMap() {
+        Map<String, EsField> fromEs = Types.fromEs(null);
         assertThat(fromEs.isEmpty(), is(true));
     }

-    public void testEmptyMap() throws Exception {
-        Map<String, DataType> fromEs = Types.fromEs(emptyMap());
+    public void testEmptyMap() {
+        Map<String, EsField> fromEs = Types.fromEs(emptyMap());
         assertThat(fromEs.isEmpty(), is(true));
     }

-    public void testBasicMapping() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-basic.json");
+    public void testBasicMapping() {
+        Map<String, EsField> mapping = loadMapping("mapping-basic.json");
         assertThat(mapping.size(), is(6));
-        assertThat(mapping.get("emp_no"), is(INTEGER));
-        assertThat(mapping.get("first_name"), instanceOf(TextType.class));
-        assertThat(mapping.get("last_name"), is(TEXT));
-        assertThat(mapping.get("gender"), is(KEYWORD));
-        assertThat(mapping.get("salary"), is(INTEGER));
+        assertThat(mapping.get("emp_no").getDataType(), is(INTEGER));
+        assertThat(mapping.get("first_name"), instanceOf(TextEsField.class));
+        assertThat(mapping.get("last_name").getDataType(), is(TEXT));
+        assertThat(mapping.get("gender").getDataType(), is(KEYWORD));
+        assertThat(mapping.get("salary").getDataType(), is(INTEGER));
     }

-    public void testDefaultStringMapping() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-default-string.json");
+    public void testDefaultStringMapping() {
+        Map<String, EsField> mapping = loadMapping("mapping-default-string.json");

         assertThat(mapping.size(), is(1));
-        assertThat(mapping.get("dep_no").same(TEXT), is(true));
+        assertThat(mapping.get("dep_no").getDataType(), is(TEXT));
     }

-    public void testTextField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-text.json");
+    public void testTextField() {
+        Map<String, EsField> mapping = loadMapping("mapping-text.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("full_name");
-        assertThat(type, instanceOf(TextType.class));
+        EsField type = mapping.get("full_name");
+        assertThat(type, instanceOf(TextEsField.class));
         assertThat(type.hasDocValues(), is(false));
-        TextType ttype = (TextType) type;
-        assertThat(type.precision(), is(Integer.MAX_VALUE));
-        assertThat(ttype.hasFieldData(), is(false));
+        TextEsField ttype = (TextEsField) type;
+        assertThat(type.getPrecision(), is(Integer.MAX_VALUE));
+        assertThat(ttype.hasDocValues(), is(false));
     }

-    public void testKeywordField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-keyword.json");
+    public void testKeywordField() {
+        Map<String, EsField> mapping = loadMapping("mapping-keyword.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("full_name");
-        assertThat(type, instanceOf(KeywordType.class));
-        assertThat(type.hasDocValues(), is(true));
-        assertThat(type.precision(), is(256));
+        EsField field = mapping.get("full_name");
+        assertThat(field, instanceOf(KeywordEsField.class));
+        assertThat(field.hasDocValues(), is(true));
+        assertThat(field.getPrecision(), is(256));
     }

-    public void testDateField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-date.json");
+    public void testDateField() {
+        Map<String, EsField> mapping = loadMapping("mapping-date.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("date");
-        assertThat(type, is(DATE));
-        assertThat(type.hasDocValues(), is(true));
-        assertThat(type.precision(), is(19));
+        EsField field = mapping.get("date");
+        assertThat(field.getDataType(), is(DATE));
+        assertThat(field.hasDocValues(), is(true));
+        assertThat(field.getPrecision(), is(19));

-        DateType dtype = (DateType) type;
-        List<String> formats = dtype.formats();
+        DateEsField dfield = (DateEsField) field;
+        List<String> formats = dfield.getFormats();
         assertThat(formats, hasSize(3));
     }

-    public void testDateNoFormat() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-date-no-format.json");
+    public void testDateNoFormat() {
+        Map<String, EsField> mapping = loadMapping("mapping-date-no-format.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("date");
-        assertThat(type, is(DATE));
-        assertThat(type.hasDocValues(), is(true));
-        DateType dtype = (DateType) type;
+        EsField field = mapping.get("date");
+        assertThat(field.getDataType(), is(DATE));
+        assertThat(field.hasDocValues(), is(true));
+        DateEsField dfield = (DateEsField) field;
         // default types
-        assertThat(dtype.formats(), hasSize(2));
+        assertThat(dfield.getFormats(), hasSize(2));
     }

-    public void testDateMulti() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-date-multi.json");
+    public void testDateMulti() {
+        Map<String, EsField> mapping = loadMapping("mapping-date-multi.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("date");
-        assertThat(type, is(DATE));
-        assertThat(type.hasDocValues(), is(true));
-        DateType dtype = (DateType) type;
+        EsField field = mapping.get("date");
+        assertThat(field.getDataType(), is(DATE));
+        assertThat(field.hasDocValues(), is(true));
+        DateEsField dfield = (DateEsField) field;
         // default types
-        assertThat(dtype.formats(), hasSize(1));
+        assertThat(dfield.getFormats(), hasSize(1));
     }

-    public void testDocValueField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-docvalues.json");
+    public void testDocValueField() {
+        Map<String, EsField> mapping = loadMapping("mapping-docvalues.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("session_id");
-        assertThat(type, instanceOf(KeywordType.class));
-        assertThat(type.precision(), is(15));
-        assertThat(type.hasDocValues(), is(false));
+        EsField field = mapping.get("session_id");
+        assertThat(field, instanceOf(KeywordEsField.class));
+        assertThat(field.getPrecision(), is(15));
+        assertThat(field.hasDocValues(), is(false));
     }

-    public void testDottedField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-object.json");
+    public void testDottedField() {
+        Map<String, EsField> mapping = loadMapping("mapping-object.json");

         assertThat(mapping.size(), is(2));
-        DataType type = mapping.get("manager");
-        assertThat(type.isPrimitive(), is(false));
-        assertThat(type, instanceOf(ObjectType.class));
-        ObjectType ot = (ObjectType) type;
-        Map<String, DataType> children = ot.properties();
+        EsField field = mapping.get("manager");
+        assertThat(field.getDataType().isPrimitive(), is(false));
+        assertThat(field.getDataType(), is(OBJECT));
+        Map<String, EsField> children = field.getProperties();
         assertThat(children.size(), is(2));
-        DataType names = children.get("name");
-        children = ((ObjectType) names).properties();
+        EsField names = children.get("name");
+        children = names.getProperties();
         assertThat(children.size(), is(2));
-        assertThat(children.get("first"), is(TEXT));
+        assertThat(children.get("first").getDataType(), is(TEXT));
     }

-    public void testMultiField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-multi-field.json");
+    public void testMultiField() {
+        Map<String, EsField> mapping = loadMapping("mapping-multi-field.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("text");
-        assertThat(type.isPrimitive(), is(true));
-        assertThat(type, instanceOf(TextType.class));
-        TextType tt = (TextType) type;
-        Map<String, DataType> fields = tt.fields();
+        EsField field = mapping.get("text");
+        assertThat(field.getDataType().isPrimitive(), is(true));
+        assertThat(field.getDataType(), is(TEXT));
+        Map<String, EsField> fields = field.getProperties();
         assertThat(fields.size(), is(2));
-        assertThat(fields.get("raw"), is(KEYWORD));
-        assertThat(fields.get("english"), is(TEXT));
+        assertThat(fields.get("raw").getDataType(), is(KEYWORD));
+        assertThat(fields.get("english").getDataType(), is(TEXT));
     }

-    public void testMultiFieldTooManyOptions() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-multi-field.json");
+    public void testMultiFieldTooManyOptions() {
+        Map<String, EsField> mapping = loadMapping("mapping-multi-field.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("text");
-        assertThat(type.isPrimitive(), is(true));
-        assertThat(type, instanceOf(TextType.class));
-        TextType tt = (TextType) type;
-        Map<String, DataType> fields = tt.fields();
+        EsField field = mapping.get("text");
+        assertThat(field.getDataType().isPrimitive(), is(true));
+        assertThat(field, instanceOf(TextEsField.class));
+        Map<String, EsField> fields = field.getProperties();
         assertThat(fields.size(), is(2));
-        assertThat(fields.get("raw"), is(KEYWORD));
-        assertThat(fields.get("english"), is(TEXT));
+        assertThat(fields.get("raw").getDataType(), is(KEYWORD));
+        assertThat(fields.get("english").getDataType(), is(TEXT));
     }

-    public void testNestedDoc() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-nested.json");
+    public void testNestedDoc() {
+        Map<String, EsField> mapping = loadMapping("mapping-nested.json");

         assertThat(mapping.size(), is(1));
-        DataType type = mapping.get("dep");
-        assertThat(type.isPrimitive(), is(false));
-        assertThat(type, instanceOf(NestedType.class));
-        NestedType ot = (NestedType) type;
-        Map<String, DataType> children = ot.properties();
+        EsField field = mapping.get("dep");
+        assertThat(field.getDataType().isPrimitive(), is(false));
+        assertThat(field.getDataType(), is(NESTED));
+        Map<String, EsField> children = field.getProperties();
         assertThat(children.size(), is(4));
-        assertThat(children.get("dep_name"), is(TEXT));
-        assertThat(children.get("start_date"), is(DATE));
+        assertThat(children.get("dep_name").getDataType(), is(TEXT));
+        assertThat(children.get("start_date").getDataType(), is(DATE));
     }

-    public void testGeoField() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-geo.json");
-        DataType dt = mapping.get("location");
-        assertThat(dt.esName(), is("geo_point"));
+    public void testGeoField() {
+        Map<String, EsField> mapping = loadMapping("mapping-geo.json");
+        EsField dt = mapping.get("location");
+        assertThat(dt.getDataType().esType, is("geo_point"));
     }

-    public void testUnsupportedTypes() throws Exception {
-        Map<String, DataType> mapping = loadMapping("mapping-unsupported.json");
-        DataType dt = mapping.get("range");
-        assertThat(dt.esName(), is("integer_range"));
+    public void testUnsupportedTypes() {
+        Map<String, EsField> mapping = loadMapping("mapping-unsupported.json");
+        EsField dt = mapping.get("range");
+        assertThat(dt.getDataType().esType, is("unsupported"));
     }

-    public static Map<String, DataType> loadMapping(String name) {
+    public static Map<String, EsField> loadMapping(String name) {
         InputStream stream = TypesTests.class.getResourceAsStream("/" + name);
         assertNotNull("Could not find mapping resource:" + name, stream);
         return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, randomBoolean()));
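Types.fromEs now resolves a mapping into a tree of EsField objects rather than DataType singletons. A short sketch of walking that tree, restricted to the accessors exercised in the tests above (getDataType, getProperties, hasDocValues, getPrecision, esType); the entry point is the test helper loadMapping shown in this hunk:

    import java.util.Map;

    import org.elasticsearch.xpack.sql.type.EsField;

    class MappingWalk {
        // Recursively prints every field with its ES type name, doc-values flag and precision.
        static void describe(String prefix, Map<String, EsField> fields) {
            for (Map.Entry<String, EsField> entry : fields.entrySet()) {
                EsField field = entry.getValue();
                System.out.println(prefix + entry.getKey()
                        + " -> " + field.getDataType().esType   // e.g. "keyword", "date", "unsupported"
                        + ", docValues=" + field.hasDocValues()
                        + ", precision=" + field.getPrecision());
                // object, nested and multi-field parents expose their children as properties
                describe(prefix + entry.getKey() + ".", field.getProperties());
            }
        }
    }

For instance, MappingWalk.describe("", TypesTests.loadMapping("mapping-basic.json")) would list the six fields checked in testBasicMapping.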