SQL: SYS COLUMNS returns ODBC specific schema (#35870)

Due to an unresolvable type conflict between the expected definitions in JDBC vs ODBC, the driver mode is now passed through so that certain commands (in this case SYS COLUMNS) can adjust their results accordingly.

Fix #35376
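
For context, the clash is between JDBC's DatabaseMetaData.getColumns(), which defines metadata columns such as DATA_TYPE and NULLABLE as Integer, and ODBC's SQLColumns result set, which declares the same columns as SQLSMALLINT, so the driver unboxes them as Short. A minimal illustration of the resulting ClassCastException (hypothetical snippet, not code from this commit):

    import java.sql.Types;

    public class OdbcTypeMismatch {
        public static void main(String[] args) {
            // The server previously serialized DATA_TYPE as an Integer for every client.
            Object dataType = Integer.valueOf(Types.INTEGER);
            // An ODBC-mode consumer expects SQLSMALLINT and unboxes the value as a Short,
            // which fails at runtime because the boxed value is actually an Integer.
            Short asShort = (Short) dataType;
            System.out.println(asShort);
        }
    }

Passing the client mode down into the command lets SYS COLUMNS emit Short values for those columns only when the request comes from the ODBC driver, while JDBC keeps receiving Integer.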
Costin Leau 2018-11-26 16:59:09 +02:00 committed by GitHub
parent ca9b2b9931
commit d291b080de
6 changed files with 138 additions and 32 deletions


@@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
+import org.elasticsearch.xpack.sql.proto.Mode;
 import org.elasticsearch.xpack.sql.session.Rows;
 import org.elasticsearch.xpack.sql.session.SchemaRowSet;
 import org.elasticsearch.xpack.sql.session.SqlSession;
@@ -58,21 +59,29 @@ public class SysColumns extends Command {
     @Override
     public List<Attribute> output() {
+        return output(false);
+    }
+
+    private List<Attribute> output(boolean odbcCompatible) {
+        // https://github.com/elastic/elasticsearch/issues/35376
+        // ODBC expects some fields as SHORT while JDBC as Integer
+        // which causes conversion issues and CCE
+        DataType clientBasedType = odbcCompatible ? SHORT : INTEGER;
         return asList(keyword("TABLE_CAT"),
                       keyword("TABLE_SCHEM"),
                       keyword("TABLE_NAME"),
                       keyword("COLUMN_NAME"),
-                      field("DATA_TYPE", INTEGER),
+                      field("DATA_TYPE", clientBasedType),
                       keyword("TYPE_NAME"),
                       field("COLUMN_SIZE", INTEGER),
                       field("BUFFER_LENGTH", INTEGER),
-                      field("DECIMAL_DIGITS", INTEGER),
-                      field("NUM_PREC_RADIX", INTEGER),
-                      field("NULLABLE", INTEGER),
+                      field("DECIMAL_DIGITS", clientBasedType),
+                      field("NUM_PREC_RADIX", clientBasedType),
+                      field("NULLABLE", clientBasedType),
                       keyword("REMARKS"),
                       keyword("COLUMN_DEF"),
-                      field("SQL_DATA_TYPE", INTEGER),
-                      field("SQL_DATETIME_SUB", INTEGER),
+                      field("SQL_DATA_TYPE", clientBasedType),
+                      field("SQL_DATETIME_SUB", clientBasedType),
                       field("CHAR_OCTET_LENGTH", INTEGER),
                       field("ORDINAL_POSITION", INTEGER),
                       keyword("IS_NULLABLE"),
@@ -88,11 +97,13 @@ public class SysColumns extends Command {
     @Override
     public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
+        boolean isOdbcClient = session.settings().mode() == Mode.ODBC;
+        List<Attribute> output = output(isOdbcClient);
         String cluster = session.indexResolver().clusterName();
         // bail-out early if the catalog is present but differs
         if (Strings.hasText(catalog) && !cluster.equals(catalog)) {
-            listener.onResponse(Rows.empty(output()));
+            listener.onResponse(Rows.empty(output));
             return;
         }
@@ -104,15 +115,15 @@ public class SysColumns extends Command {
         session.indexResolver().resolveAsSeparateMappings(idx, regex, ActionListener.wrap(esIndices -> {
             List<List<?>> rows = new ArrayList<>();
             for (EsIndex esIndex : esIndices) {
-                fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher);
+                fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher, isOdbcClient);
             }
-            listener.onResponse(Rows.of(output(), rows));
+            listener.onResponse(Rows.of(output, rows));
         }, listener::onFailure));
     }

     static void fillInRows(String clusterName, String indexName, Map<String, EsField> mapping, String prefix, List<List<?>> rows,
-            Pattern columnMatcher) {
+            Pattern columnMatcher, boolean isOdbcClient) {
         int pos = 0;
         for (Map.Entry<String, EsField> entry : mapping.entrySet()) {
             pos++; // JDBC is 1-based so we start with 1 here
@@ -128,24 +139,24 @@ public class SysColumns extends Command {
                     null,
                     indexName,
                     name,
-                    type.sqlType.getVendorTypeNumber(),
+                    odbcCompatible(type.sqlType.getVendorTypeNumber(), isOdbcClient),
                     type.esType.toUpperCase(Locale.ROOT),
                     type.displaySize,
                     // TODO: is the buffer_length correct?
                     type.size,
                     // no DECIMAL support
                     null,
-                    DataTypes.metaSqlRadix(type),
+                    odbcCompatible(DataTypes.metaSqlRadix(type), isOdbcClient),
                     // everything is nullable
-                    DatabaseMetaData.columnNullable,
+                    odbcCompatible(DatabaseMetaData.columnNullable, isOdbcClient),
                     // no remarks
                     null,
                     // no column def
                     null,
                     // SQL_DATA_TYPE apparently needs to be same as DATA_TYPE except for datetime and interval data types
-                    DataTypes.metaSqlDataType(type),
+                    odbcCompatible(DataTypes.metaSqlDataType(type), isOdbcClient),
                     // SQL_DATETIME_SUB ?
-                    DataTypes.metaSqlDateTimeSub(type),
+                    odbcCompatible(DataTypes.metaSqlDateTimeSub(type), isOdbcClient),
                     // char octet length
                     type.isString() || type == DataType.BINARY ? type.size : null,
                     // position
@@ -160,11 +171,18 @@ public class SysColumns extends Command {
                 ));
             }
             if (field.getProperties() != null) {
-                fillInRows(clusterName, indexName, field.getProperties(), name, rows, columnMatcher);
+                fillInRows(clusterName, indexName, field.getProperties(), name, rows, columnMatcher, isOdbcClient);
             }
         }
     }

+    private static Object odbcCompatible(Integer value, boolean isOdbcClient) {
+        if (isOdbcClient && value != null) {
+            return Short.valueOf(value.shortValue());
+        }
+        return value;
+    }
+
     @Override
     public int hashCode() {
         return Objects.hash(catalog, index, pattern, columnPattern);


@@ -58,8 +58,8 @@ public class TransportSqlQueryAction extends HandledTransportAction<SqlQueryRequ
         // The configuration is always created however when dealing with the next page, only the timeouts are relevant
         // the rest having default values (since the query is already created)
         Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(),
-                request.filter());
+                request.filter(), request.mode());
         // mode() shouldn't be null
         QueryMetric metric = QueryMetric.from(request.mode(), request.clientId());
         planExecutor.metrics().total(metric);


@@ -40,7 +40,7 @@ public class TransportSqlTranslateAction extends HandledTransportAction<SqlTrans
         planExecutor.metrics().translate();
         Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(),
-                request.requestTimeout(), request.pageTimeout(), request.filter());
+                request.requestTimeout(), request.pageTimeout(), request.filter(), request.mode());
         planExecutor.searchSource(cfg, request.query(), request.params(), ActionListener.wrap(
                 searchSourceBuilder -> listener.onResponse(new SqlTranslateResponse(searchSourceBuilder)), listener::onFailure));


@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.session;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.xpack.sql.proto.Mode;
 import org.elasticsearch.xpack.sql.proto.Protocol;

 import java.util.TimeZone;
@@ -15,22 +16,24 @@ import java.util.TimeZone;
 // Typed object holding properties for a given action
 public class Configuration {
     public static final Configuration DEFAULT = new Configuration(TimeZone.getTimeZone("UTC"),
-            Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null);
+            Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN);

-    private TimeZone timeZone;
-    private int pageSize;
-    private TimeValue requestTimeout;
-    private TimeValue pageTimeout;
+    private final TimeZone timeZone;
+    private final int pageSize;
+    private final TimeValue requestTimeout;
+    private final TimeValue pageTimeout;
+    private final Mode mode;

     @Nullable
     private QueryBuilder filter;

-    public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter) {
+    public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode) {
         this.timeZone = tz;
         this.pageSize = pageSize;
         this.requestTimeout = requestTimeout;
         this.pageTimeout = pageTimeout;
         this.filter = filter;
+        this.mode = mode == null ? Mode.PLAIN : mode;
     }

     public TimeZone timeZone() {
@@ -52,4 +55,8 @@ public class Configuration {
     public QueryBuilder filter() {
         return filter;
     }
+
+    public Mode mode() {
+        return mode;
+    }
 }


@@ -16,7 +16,7 @@ public class SysColumnsTests extends ESTestCase {
     public void testSysColumns() {
         List<List<?>> rows = new ArrayList<>();
-        SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null);
+        SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, false);
         assertEquals(16, rows.size());
         assertEquals(24, rows.get(0).size());
@@ -58,6 +58,70 @@
         assertEquals(Integer.MAX_VALUE, bufferLength(row));
     }

+    public void testSysColumnsInOdbcMode() {
+        List<List<?>> rows = new ArrayList<>();
+        SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null, true);
+        assertEquals(16, rows.size());
+        assertEquals(24, rows.get(0).size());
+
+        List<?> row = rows.get(0);
+        assertEquals("bool", name(row));
+        assertEquals((short) Types.BOOLEAN, sqlType(row));
+        assertEquals(null, radix(row));
+        assertEquals(1, bufferLength(row));
+
+        row = rows.get(1);
+        assertEquals("int", name(row));
+        assertEquals((short) Types.INTEGER, sqlType(row));
+        assertEquals(Short.class, radix(row).getClass());
+        assertEquals(4, bufferLength(row));
+        assertNull(decimalPrecision(row));
+        assertEquals(Short.class, nullable(row).getClass());
+        assertEquals(Short.class, sqlDataType(row).getClass());
+        assertEquals(Short.class, sqlDataTypeSub(row).getClass());
+
+        row = rows.get(2);
+        assertEquals("text", name(row));
+        assertEquals((short) Types.VARCHAR, sqlType(row));
+        assertEquals(null, radix(row));
+        assertEquals(Integer.MAX_VALUE, bufferLength(row));
+        assertNull(decimalPrecision(row));
+        assertEquals(Short.class, nullable(row).getClass());
+        assertEquals(Short.class, sqlDataType(row).getClass());
+        assertEquals(Short.class, sqlDataTypeSub(row).getClass());
+
+        row = rows.get(4);
+        assertEquals("date", name(row));
+        assertEquals((short) Types.TIMESTAMP, sqlType(row));
+        assertEquals(null, radix(row));
+        assertEquals(24, precision(row));
+        assertEquals(8, bufferLength(row));
+        assertNull(decimalPrecision(row));
+        assertEquals(Short.class, nullable(row).getClass());
+        assertEquals(Short.class, sqlDataType(row).getClass());
+        assertEquals(Short.class, sqlDataTypeSub(row).getClass());
+
+        row = rows.get(7);
+        assertEquals("some.dotted", name(row));
+        assertEquals((short) Types.STRUCT, sqlType(row));
+        assertEquals(null, radix(row));
+        assertEquals(-1, bufferLength(row));
+        assertNull(decimalPrecision(row));
+        assertEquals(Short.class, nullable(row).getClass());
+        assertEquals(Short.class, sqlDataType(row).getClass());
+        assertEquals(Short.class, sqlDataTypeSub(row).getClass());
+
+        row = rows.get(15);
+        assertEquals("some.ambiguous.normalized", name(row));
+        assertEquals((short) Types.VARCHAR, sqlType(row));
+        assertEquals(null, radix(row));
+        assertEquals(Integer.MAX_VALUE, bufferLength(row));
+        assertNull(decimalPrecision(row));
+        assertEquals(Short.class, nullable(row).getClass());
+        assertEquals(Short.class, sqlDataType(row).getClass());
+        assertEquals(Short.class, sqlDataTypeSub(row).getClass());
+    }
+
     private static Object name(List<?> list) {
         return list.get(3);
     }
@@ -74,7 +138,23 @@
         return list.get(7);
     }

+    private static Object decimalPrecision(List<?> list) {
+        return list.get(8);
+    }
+
     private static Object radix(List<?> list) {
         return list.get(9);
     }
-}
+
+    private static Object nullable(List<?> list) {
+        return list.get(10);
+    }
+
+    private static Object sqlDataType(List<?> list) {
+        return list.get(13);
+    }
+
+    private static Object sqlDataTypeSub(List<?> list) {
+        return list.get(14);
+    }
+}


@@ -16,6 +16,7 @@ import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
 import org.elasticsearch.xpack.sql.parser.SqlParser;
 import org.elasticsearch.xpack.sql.plan.logical.command.Command;
+import org.elasticsearch.xpack.sql.session.Configuration;
 import org.elasticsearch.xpack.sql.session.SqlSession;
 import org.elasticsearch.xpack.sql.stats.Metrics;
 import org.elasticsearch.xpack.sql.type.DataType;
@@ -53,7 +54,7 @@
             return Void.TYPE;
         }).when(resolver).resolveAsSeparateMappings(any(), any(), any());

-        SqlSession session = new SqlSession(null, null, null, resolver, null, null, null, null);
+        SqlSession session = new SqlSession(Configuration.DEFAULT, null, null, resolver, null, null, null, null);
         return new Tuple<>(cmd, session);
     }
@@ -61,10 +62,10 @@
         Command cmd = sql("SYS TYPES").v1();
         List<String> names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE",
                 "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE",
                 "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND",
                 "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND",
                 "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND",
                 "UNSUPPORTED", "OBJECT", "NESTED");
         cmd.execute(null, ActionListener.wrap(r -> {