SQL: replace JDBC transport meta columns with SQL call (elastic/x-pack-elasticsearch#3740)

Replace meta column endpoint with SYS COLUMNS command

Original commit: elastic/x-pack-elasticsearch@819874bc5b
This commit is contained in:
Costin Leau 2018-01-29 19:18:24 +02:00 committed by GitHub
parent e929d16d76
commit 07658cc04f
32 changed files with 959 additions and 1468 deletions

View File

@ -183,35 +183,3 @@ or fewer results though. `time_zone` is the time zone to use for date
functions and date parsing. `time_zone` defaults to `utc` and can take
any values documented
http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here].
[[sql-rest-metadata]]
To get the list of fields that are supported in SQL, execute:
[source,js]
--------------------------------------------------
POST /_xpack/sql/columns
{
"table_pattern": "library",
"column_pattern": ""
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
[source,js]
--------------------------------------------------
{
"columns": [
{"table": "library", "name": "author", "type": "text", "position": 1},
{"table": "library", "name": "name", "type": "text", "position": 2},
{"table": "library", "name": "page_count", "type": "short", "position": 3},
{"table": "library", "name": "release_date", "type": "date", "position": 4}
]
}
--------------------------------------------------
// TESTRESPONSE
The `position` is the position in the original table and won't match the position in the
array if the `column_pattern` removes any columns.

View File

@ -480,8 +480,7 @@ public class AuthorizationService extends AbstractComponent {
action.equals("indices:data/read/search/template") ||
action.equals("indices:data/write/reindex") ||
action.equals("indices:data/read/sql") ||
action.equals("indices:data/read/sql/translate") ||
action.equals("indices:admin/sql/columns") ;
action.equals("indices:data/read/sql/translate");
}
private static boolean isTranslatedToBulkAction(String action) {

View File

@ -10,7 +10,6 @@ import org.elasticsearch.xpack.sql.client.shared.Version;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
import org.elasticsearch.xpack.sql.plugin.MetaColumnInfo;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
@ -23,8 +22,6 @@ import java.sql.SQLFeatureNotSupportedException;
import java.util.ArrayList;
import java.util.List;
import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.numericPrecisionRadix;
/**
* Implementation of {@link DatabaseMetaData} for Elasticsearch. Draws inspiration
* from <a href="https://www.postgresql.org/docs/9.0/static/information-schema.html">
@ -742,73 +739,11 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper {
@Override
public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
throws SQLException {
    // NOTE(review): this listing appears to interleave two implementations of the method —
    // the removed one (client-side columnInfo/memorySet rendering) and its replacement
    // (delegating to a "SYS COLUMNS" SQL command) — because the diff's +/- markers were
    // stripped. As written, everything after 'return memorySet(...)' is unreachable and the
    // method would not compile; exactly one of the two bodies belongs in the real file.

    // Old implementation: build the 24-column JDBC getColumns() schema locally...
    List<ColumnInfo> info = columnInfo("COLUMNS",
            "TABLE_CAT",
            "TABLE_SCHEM",
            "TABLE_NAME",
            "COLUMN_NAME",
            "DATA_TYPE", int.class,
            "TYPE_NAME",
            "COLUMN_SIZE", int.class,
            "BUFFER_LENGTH", void.class,
            "DECIMAL_DIGITS", int.class,
            "NUM_PREC_RADIX", int.class,
            "NULLABLE", int.class,
            "REMARKS",
            "COLUMN_DEF",
            "SQL_DATA_TYPE", int.class,
            "SQL_DATETIME_SUB", int.class,
            "CHAR_OCTET_LENGTH", int.class,
            "ORDINAL_POSITION", int.class,
            "IS_NULLABLE",
            "SCOPE_CATALOG",
            "SCOPE_SCHEMA",
            "SCOPE_TABLE",
            "SOURCE_DATA_TYPE", short.class,
            "IS_AUTOINCREMENT",
            "IS_GENERATEDCOLUMN");
    // schema and catalogs are not being used, if these are specified return an empty result set
    if (!isDefaultCatalog(catalog) || !isDefaultSchema(schemaPattern)) {
        return emptySet(con.cfg, info);
    }
    String cat = defaultCatalog();
    // escaping is done on the server
    List<MetaColumnInfo> columns = con.client.metaInfoColumns(tableNamePattern, columnNamePattern);
    // ...then materialize one 24-slot row per column returned by the transport-level call
    Object[][] data = new Object[columns.size()][];
    for (int i = 0; i < data.length; i++) {
        data[i] = new Object[24];
        Object[] row = data[i];
        MetaColumnInfo col = columns.get(i);
        row[ 0] = cat;
        row[ 1] = "";
        row[ 2] = col.table();
        row[ 3] = col.name();
        row[ 4] = col.jdbcType().getVendorTypeNumber();
        row[ 5] = col.jdbcType().getName();
        row[ 6] = col.size();
        row[ 7] = null;
        row[ 8] = null;
        row[ 9] = numericPrecisionRadix(col.jdbcType().getVendorTypeNumber());
        row[10] = columnNullable;
        row[11] = null;
        row[12] = null;
        row[13] = null;
        row[14] = null;
        row[15] = null;
        row[16] = col.position();
        row[17] = "YES";
        row[18] = null;
        row[19] = null;
        row[20] = null;
        row[21] = null;
        row[22] = "";
        row[23] = "";
    }
    return memorySet(con.cfg, info, data);
    // New implementation: let the server build the JDBC metadata via the SYS COLUMNS command.
    // Null patterns fall back to "%" (match everything), per the JDBC contract.
    PreparedStatement ps = con.prepareStatement("SYS COLUMNS TABLES LIKE ? LIKE ?");
    ps.setString(1, tableNamePattern != null ? tableNamePattern.trim() : "%");
    ps.setString(2, columnNamePattern != null ? columnNamePattern.trim() : "%");
    return ps.executeQuery();
}
@Override

View File

@ -354,7 +354,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
}
Object val = column(columnIndex);
if (val == null) {
return null;
}
@ -371,7 +371,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
T t = TypeConverter.convert(val, columnType, type);
if (t != null) {
if (t != null || type == null) {
return t;
}
throw new SQLException("Conversion from type [" + columnType + "] to [" + type.getName() + "] not supported");

View File

@ -166,22 +166,4 @@ public abstract class JdbcUtils {
/**
 * Resolves a numeric {@code java.sql.Types} constant into the corresponding {@link JDBCType}
 * enum value. Throws {@code IllegalArgumentException} for values that do not map to a
 * {@link JDBCType} (per {@code JDBCType.valueOf}).
 */
static JDBCType type(int jdbcType) {
    return JDBCType.valueOf(jdbcType);
}
/**
 * Radix used to interpret a type's precision, mirroring the JDBC NUM_PREC_RADIX column:
 * base 10 for exact integer types, base 2 for approximate/decimal numeric types and
 * {@code null} for non-numeric types.
 */
static Integer numericPrecisionRadix(int type) {
    // exact integer types carry a decimal (base-10) precision
    if (type == TINYINT || type == SMALLINT || type == INTEGER || type == BIGINT) {
        return 10;
    }
    // floating-point and decimal types report a binary (base-2) precision
    if (type == REAL || type == DOUBLE || type == FLOAT || type == DECIMAL || type == NUMERIC) {
        return 2;
    }
    // non-numeric types have no radix
    return null;
}
}

View File

@ -64,7 +64,7 @@ class PreparedQuery {
for (int i = 0; i < fragments.size(); i++) {
sb.append(fragments.get(i));
if (i < params.length) {
// TODO: this needs conversion
// TODO: this needs converting
sb.append(params[i].value);
}
}

View File

@ -13,8 +13,6 @@ import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.InfoResponse;
import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest;
import org.elasticsearch.xpack.sql.plugin.MetaColumnInfo;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsRequest;
import org.elasticsearch.xpack.sql.plugin.SqlQueryRequest;
import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
import org.joda.time.DateTimeZone;
@ -77,10 +75,6 @@ public class JdbcHttpClient {
return new InfoResponse(mainResponse.getClusterName().value(), mainResponse.getVersion().major, mainResponse.getVersion().minor);
}
public List<MetaColumnInfo> metaInfoColumns(String tablePattern, String columnPattern) throws SQLException {
return httpClient.listColumns(new SqlListColumnsRequest(AbstractSqlRequest.Mode.JDBC, tablePattern, columnPattern)).getColumns();
}
/**
* Converts REST column metadata into JDBC column metadata
*/
@ -89,5 +83,4 @@ public class JdbcHttpClient {
new ColumnInfo(columnInfo.name(), columnInfo.jdbcType(), EMPTY, EMPTY, EMPTY, EMPTY, columnInfo.displaySize())
).collect(Collectors.toList());
}
}
}

View File

@ -1,30 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
/**
 * Transport action that lists the SQL-supported columns of the indices matching a pattern.
 * Invoked by the {@code /_xpack/sql/columns} REST endpoint.
 */
public class SqlListColumnsAction extends Action<SqlListColumnsRequest, SqlListColumnsResponse, SqlListColumnsRequestBuilder> {

    // singleton used for registration and client invocation
    public static final SqlListColumnsAction INSTANCE = new SqlListColumnsAction();
    // transport-level action name (checked by security/authorization)
    public static final String NAME = "indices:admin/sql/columns";
    // REST path this action is exposed at
    public static final String REST_ENDPOINT = "/_xpack/sql/columns";

    private SqlListColumnsAction() {
        super(NAME);
    }

    @Override
    public SqlListColumnsRequestBuilder newRequestBuilder(ElasticsearchClient client) {
        return new SqlListColumnsRequestBuilder(client, this);
    }

    @Override
    public SqlListColumnsResponse newResponse() {
        return new SqlListColumnsResponse();
    }
}

View File

@ -1,134 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Request to get a list of SQL-supported columns of an index
* <p>
It needs to be CompositeIndicesRequest because we resolve wildcards in a
non-standard SQL manner
*/
public class SqlListColumnsRequest extends AbstractSqlRequest implements ToXContentObject, CompositeIndicesRequest {

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<SqlListColumnsRequest, Mode> PARSER =
            new ConstructingObjectParser<>("sql_list_tables", true,
                    (args, mode) -> new SqlListColumnsRequest(mode, (String) args[0], (String) args[1]));

    static {
        PARSER.declareString(optionalConstructorArg(), new ParseField("table_pattern"));
        PARSER.declareString(optionalConstructorArg(), new ParseField("column_pattern"));
    }

    // both patterns are optional; a null pattern means "match everything"
    @Nullable
    private String tablePattern;
    @Nullable
    private String columnPattern;

    public SqlListColumnsRequest() {
    }

    public SqlListColumnsRequest(Mode mode, String tablePattern, String columnPattern) {
        super(mode);
        this.tablePattern = tablePattern;
        this.columnPattern = columnPattern;
    }

    /** Deserializes the request from the transport wire format. */
    public SqlListColumnsRequest(StreamInput in) throws IOException {
        super(in);
        tablePattern = in.readOptionalString();
        columnPattern = in.readOptionalString();
    }

    /**
     * The index pattern for the results
     */
    public String getTablePattern() {
        return tablePattern;
    }

    public void setTablePattern(String tablePattern) {
        this.tablePattern = tablePattern;
    }

    /**
     * The column pattern for the results
     */
    public String getColumnPattern() {
        return columnPattern;
    }

    public void setColumnPattern(String columnPattern) {
        this.columnPattern = columnPattern;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeOptionalString(tablePattern);
        out.writeOptionalString(columnPattern);
    }

    @Override
    public String getDescription() {
        return "SQL List Columns[" + getTablePattern() + ", " + getColumnPattern() + "]";
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // only serialize the patterns that were actually set
        builder.startObject();
        if (tablePattern != null) {
            builder.field("table_pattern", tablePattern);
        }
        if (columnPattern != null) {
            builder.field("column_pattern", columnPattern);
        }
        return builder.endObject();
    }

    /** Parses a request body, carrying over the client {@code mode}. */
    public static SqlListColumnsRequest fromXContent(XContentParser parser, Mode mode) {
        return PARSER.apply(parser, mode);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass() || super.equals(o) == false) {
            return false;
        }
        SqlListColumnsRequest other = (SqlListColumnsRequest) o;
        return Objects.equals(tablePattern, other.tablePattern)
                && Objects.equals(columnPattern, other.columnPattern);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), tablePattern, columnPattern);
    }
}

View File

@ -1,36 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
/**
 * Fluent builder for {@link SqlListColumnsRequest}, used through
 * {@code SqlListColumnsAction#newRequestBuilder}.
 */
public class SqlListColumnsRequestBuilder extends
        ActionRequestBuilder<SqlListColumnsRequest, SqlListColumnsResponse, SqlListColumnsRequestBuilder> {

    public SqlListColumnsRequestBuilder(ElasticsearchClient client, SqlListColumnsAction action) {
        super(client, action, new SqlListColumnsRequest());
    }

    // sets the index (table) pattern on the underlying request
    public SqlListColumnsRequestBuilder indexPattern(String indexPattern) {
        request.setTablePattern(indexPattern);
        return this;
    }

    // sets the column pattern on the underlying request
    public SqlListColumnsRequestBuilder columnPattern(String columnPattern) {
        request.setColumnPattern(columnPattern);
        return this;
    }

    // sets the client mode from its string form
    public SqlListColumnsRequestBuilder mode(String mode) {
        request.mode(mode);
        return this;
    }

    public SqlListColumnsRequestBuilder mode(AbstractSqlRequest.Mode mode) {
        request.mode(mode);
        return this;
    }
}

View File

@ -1,104 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Response carrying the list of SQL-supported columns for the matching indices
*/
public class SqlListColumnsResponse extends ActionResponse implements ToXContentObject {

    // lenient parser (ignores unknown fields) for the REST/XContent representation
    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<SqlListColumnsResponse, Void> PARSER = new ConstructingObjectParser<>("sql", true,
            objects -> new SqlListColumnsResponse((List<MetaColumnInfo>) objects[0]));

    public static final ParseField COLUMNS = new ParseField("columns");

    static {
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> MetaColumnInfo.fromXContent(p), COLUMNS);
    }

    // column metadata for the matching indices; set by the constructor or readFrom
    private List<MetaColumnInfo> columns;

    public SqlListColumnsResponse() {
    }

    public SqlListColumnsResponse(List<MetaColumnInfo> columns) {
        this.columns = columns;
    }

    /**
     * The columns returned for the request's table/column patterns.
     */
    public List<MetaColumnInfo> getColumns() {
        return columns;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        columns = in.readList(MetaColumnInfo::new);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeList(columns);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // renders as {"columns": [ {...}, ... ]}
        builder.startObject();
        {
            builder.startArray("columns");
            {
                for (MetaColumnInfo column : columns) {
                    column.toXContent(builder, params);
                }
            }
            builder.endArray();
        }
        return builder.endObject();
    }

    public static SqlListColumnsResponse fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        SqlListColumnsResponse that = (SqlListColumnsResponse) o;
        return Objects.equals(columns, that.columns);
    }

    @Override
    public int hashCode() {
        return Objects.hash(columns);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }
}

View File

@ -1,52 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.function.Consumer;
/**
 * Round-trip (wire serialization and XContent) tests for {@link SqlListColumnsRequest}.
 */
public class SqlListColumnsRequestTests extends AbstractSerializingTestCase<SqlListColumnsRequest> {

    public AbstractSqlRequest.Mode testMode;

    @Before
    public void setup() {
        testMode = randomFrom(AbstractSqlRequest.Mode.values());
    }

    @Override
    protected SqlListColumnsRequest createTestInstance() {
        // random table and column patterns, in the randomly chosen client mode
        return new SqlListColumnsRequest(testMode, randomAlphaOfLength(10), randomAlphaOfLength(10));
    }

    @Override
    protected Writeable.Reader<SqlListColumnsRequest> instanceReader() {
        return SqlListColumnsRequest::new;
    }

    @Override
    protected SqlListColumnsRequest doParseInstance(XContentParser parser) {
        return SqlListColumnsRequest.fromXContent(parser, testMode);
    }

    @Override
    protected SqlListColumnsRequest mutateInstance(SqlListColumnsRequest instance) throws IOException {
        // copy the instance first, then apply exactly one randomly chosen mutation to the copy
        SqlListColumnsRequest mutated =
                new SqlListColumnsRequest(instance.mode(), instance.getTablePattern(), instance.getColumnPattern());
        @SuppressWarnings("unchecked")
        Consumer<SqlListColumnsRequest> mutation = randomFrom(
                request -> request.mode(randomValueOtherThan(request.mode(), () -> randomFrom(AbstractSqlRequest.Mode.values()))),
                request -> request.setColumnPattern(randomValueOtherThan(request.getColumnPattern(), () -> randomAlphaOfLength(10))),
                request -> request.setTablePattern(randomValueOtherThan(request.getTablePattern(), () -> randomAlphaOfLength(10))));
        mutation.accept(mutated);
        return mutated;
    }
}

View File

@ -1,82 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;
import java.io.IOException;
import java.sql.JDBCType;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
import static org.hamcrest.Matchers.hasSize;
/**
 * Round-trip and XContent-rendering tests for {@link SqlListColumnsResponse}.
 */
public class SqlListColumnsResponseTests extends AbstractStreamableXContentTestCase<SqlListColumnsResponse> {

    @Override
    protected SqlListColumnsResponse createTestInstance() {
        int columnCount = between(1, 10);
        // in JDBC mode each column additionally carries a JDBC type and size
        boolean jdbcMode = randomBoolean();
        List<MetaColumnInfo> columns = new ArrayList<>(columnCount);
        for (int i = 0; i < columnCount; i++) {
            if (jdbcMode) {
                columns.add(new MetaColumnInfo(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10),
                        randomFrom(JDBCType.values()), randomInt(25), randomInt(20)));
            } else {
                columns.add(new MetaColumnInfo(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), randomInt(20)));
            }
        }
        return new SqlListColumnsResponse(columns);
    }

    @Override
    protected SqlListColumnsResponse createBlankInstance() {
        return new SqlListColumnsResponse();
    }

    // verifies the rendered JSON field-by-field against the in-memory column metadata
    public void testToXContent() throws IOException {
        SqlListColumnsResponse testInstance = createTestInstance();

        XContentBuilder builder = testInstance.toXContent(XContentFactory.jsonBuilder(), EMPTY_PARAMS);
        Map<String, Object> rootMap = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();

        logger.info(builder.string());
        if (testInstance.getColumns() != null) {
            List<?> columns = (List<?>) rootMap.get("columns");
            assertThat(columns, hasSize(testInstance.getColumns().size()));
            for (int i = 0; i < columns.size(); i++) {
                Map<?, ?> columnMap = (Map<?, ?>) columns.get(i);
                MetaColumnInfo columnInfo = testInstance.getColumns().get(i);
                assertEquals(columnInfo.table(), columnMap.get("table"));
                assertEquals(columnInfo.name(), columnMap.get("name"));
                assertEquals(columnInfo.esType(), columnMap.get("type"));
                // jdbc_type and size are only emitted for JDBC-mode columns
                if (columnInfo.jdbcType() == null) {
                    assertNull(columnMap.get("jdbc_type"));
                    assertNull(columnMap.get("size"));
                } else {
                    assertEquals(columnInfo.jdbcType().getVendorTypeNumber(), columnMap.get("jdbc_type"));
                    assertEquals(columnInfo.size(), columnMap.get("size"));
                }
                assertEquals(columnInfo.position(), columnMap.get("position"));
            }
        } else {
            assertNull(rootMap.get("columns"));
        }
    }

    @Override
    protected SqlListColumnsResponse doParseInstance(XContentParser parser) {
        return SqlListColumnsResponse.fromXContent(parser);
    }
}

View File

@ -25,9 +25,6 @@ import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest;
import org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction;
import org.elasticsearch.xpack.sql.plugin.SqlClearCursorRequest;
import org.elasticsearch.xpack.sql.plugin.SqlClearCursorResponse;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsAction;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsRequest;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsResponse;
import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
import org.elasticsearch.xpack.sql.plugin.SqlQueryRequest;
import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
@ -88,12 +85,8 @@ public class HttpClient {
return response.isSucceeded();
}
public SqlListColumnsResponse listColumns(SqlListColumnsRequest request) throws SQLException {
return post(SqlListColumnsAction.REST_ENDPOINT, request, SqlListColumnsResponse::fromXContent);
}
private <Request extends AbstractSqlRequest, Response> Response post(String path, Request request,
CheckedFunction<XContentParser, Response, IOException> responseParser)
private <Request extends AbstractSqlRequest, Response> Response post(String path, Request request,
CheckedFunction<XContentParser, Response, IOException> responseParser)
throws SQLException {
BytesReference requestBytes = toXContent(request);
String query = "error_trace&mode=" + request.mode();
@ -167,5 +160,4 @@ public class HttpClient {
throw new ClientException("Cannot parse response", ex);
}
}
}
}

View File

@ -2,7 +2,7 @@
* ELASTICSEARCH CONFIDENTIAL
* __________________
*
* [2014] Elasticsearch Incorporated. All Rights Reserved.
* [2017] Elasticsearch Incorporated. All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of Elasticsearch Incorporated and its suppliers,
@ -56,12 +56,11 @@ statement
| (DESCRIBE | DESC) tableIdentifier #showColumns
| SHOW FUNCTIONS (LIKE? pattern)? #showFunctions
| SHOW SCHEMAS #showSchemas
| SYS TABLES (LIKE? pattern)? #sysTables
| SYS COLUMNS (LIKE? indexPattern=pattern)? (LIKE? columnPattern=pattern)? #sysColumns
| SYS COLUMNS (TABLES LIKE? indexPattern=pattern)? (LIKE? columnPattern=pattern)? #sysColumns
| SYS TYPES #sysTypes
;
query
: (WITH namedQuery (',' namedQuery)*)? queryNoWith
;

View File

@ -12,6 +12,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ShowColumnsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ShowFunctionsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ShowSchemasContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ShowTablesContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SysColumnsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SysTablesContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SysTypesContext;
import org.elasticsearch.xpack.sql.plan.TableIdentifier;
@ -22,6 +23,7 @@ import org.elasticsearch.xpack.sql.plan.logical.command.ShowColumns;
import org.elasticsearch.xpack.sql.plan.logical.command.ShowFunctions;
import org.elasticsearch.xpack.sql.plan.logical.command.ShowSchemas;
import org.elasticsearch.xpack.sql.plan.logical.command.ShowTables;
import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysColumns;
import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTables;
import org.elasticsearch.xpack.sql.plan.logical.command.sys.SysTypes;
import org.elasticsearch.xpack.sql.tree.Location;
@ -125,6 +127,7 @@ abstract class CommandBuilder extends LogicalPlanBuilder {
return new ShowColumns(source(ctx), identifier.index());
}
@Override
public SysTables visitSysTables(SysTablesContext ctx) {
return new SysTables(source(ctx), visitPattern(ctx.pattern()));
@ -134,4 +137,9 @@ abstract class CommandBuilder extends LogicalPlanBuilder {
// builds the plan node for the SYS TYPES command (no arguments)
public SysTypes visitSysTypes(SysTypesContext ctx) {
    return new SysTypes(source(ctx));
}
@Override
// builds the plan node for SYS COLUMNS [TABLES LIKE? indexPattern] [LIKE? columnPattern];
// either pattern may be absent (null), meaning "match everything"
public Object visitSysColumns(SysColumnsContext ctx) {
    return new SysColumns(source(ctx), visitPattern(ctx.indexPattern), visitPattern(ctx.columnPattern));
}
}

View File

@ -0,0 +1,185 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plan.logical.command.sys;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.regex.LikePattern;
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.tree.NodeInfo;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.EsField;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
import static java.util.Arrays.asList;
import static org.elasticsearch.xpack.sql.type.DataType.INTEGER;
import static org.elasticsearch.xpack.sql.type.DataType.NULL;
import static org.elasticsearch.xpack.sql.type.DataType.SHORT;
/**
* System command designed to be used by JDBC / ODBC for column metadata, such as
* {@link DatabaseMetaData#getColumns(String, String, String, String)}.
*/
public class SysColumns extends Command {

    // LIKE pattern over index (table) names; null means "all indices"
    private final LikePattern indexPattern;
    // LIKE pattern over column names; null means "all columns"
    private final LikePattern columnPattern;

    public SysColumns(Location location, LikePattern indexPattern, LikePattern columnPattern) {
        super(location);
        this.indexPattern = indexPattern;
        this.columnPattern = columnPattern;
    }

    public LikePattern indexPattern() {
        return indexPattern;
    }

    public LikePattern columnPattern() {
        return columnPattern;
    }

    @Override
    protected NodeInfo<SysColumns> info() {
        return NodeInfo.create(this, SysColumns::new, indexPattern, columnPattern);
    }

    /**
     * Schema of the result set: the 24 columns defined by JDBC's
     * {@code DatabaseMetaData.getColumns} contract, in order.
     */
    @Override
    public List<Attribute> output() {
        return asList(keyword("TABLE_CAT"),
                      keyword("TABLE_SCHEM"),
                      keyword("TABLE_NAME"),
                      keyword("COLUMN_NAME"),
                      field("DATA_TYPE", INTEGER),
                      keyword("TYPE_NAME"),
                      field("COLUMN_SIZE", INTEGER),
                      field("BUFFER_LENGTH", NULL),
                      field("DECIMAL_DIGITS", INTEGER),
                      field("NUM_PREC_RADIX", INTEGER),
                      field("NULLABLE", INTEGER),
                      keyword("REMARKS"),
                      keyword("COLUMN_DEF"),
                      field("SQL_DATA_TYPE", INTEGER),
                      field("SQL_DATETIME_SUB", INTEGER),
                      field("CHAR_OCTET_LENGTH", INTEGER),
                      field("ORDINAL_POSITION", INTEGER),
                      keyword("IS_NULLABLE"),
                      // JDBC specific
                      keyword("SCOPE_CATALOG"),
                      keyword("SCOPE_SCHEMA"),
                      keyword("SCOPE_TABLE"),
                      field("SOURCE_DATA_TYPE", SHORT),
                      keyword("IS_AUTOINCREMENT"),
                      keyword("IS_GENERATEDCOLUMN")
                      );
    }

    @Override
    public void execute(SqlSession session, ActionListener<SchemaRowSet> listener) {
        // index pattern is resolved server-side as a wildcard; default to all indices
        String index = indexPattern != null ? indexPattern.asIndexNameWildcard() : "*";
        String regex = indexPattern != null ? indexPattern.asJavaRegex() : null;
        // column filtering happens here, against a compiled Java regex derived from the LIKE pattern
        Pattern columnMatcher = columnPattern != null ? Pattern.compile(columnPattern.asJavaRegex()) : null;

        String cluster = session.indexResolver().clusterName();

        session.indexResolver().resolveAsSeparateMappings(index, regex, ActionListener.wrap(esIndices -> {
            List<List<?>> rows = new ArrayList<>();
            // one set of rows per matching index, all concatenated in the same result
            for (EsIndex esIndex : esIndices) {
                fillInRows(cluster, esIndex.name(), esIndex.mapping(), null, rows, columnMatcher);
            }

            listener.onResponse(Rows.of(output(), rows));
        }, listener::onFailure));
    }

    /**
     * Walks one index's mapping (recursing into sub-fields via {@code prefix}) and appends a
     * 24-value row per column whose dotted name matches {@code columnMatcher} (null matches all).
     */
    static void fillInRows(String clusterName, String indexName, Map<String, EsField> mapping, String prefix, List<List<?>> rows,
            Pattern columnMatcher) {
        int pos = 0;
        // NOTE(review): pos restarts at 0 for every (recursive) invocation, so sub-fields get
        // their own 1-based numbering, and columns filtered out by columnMatcher still consume
        // a position — confirm this is what ORDINAL_POSITION consumers expect.
        for (Map.Entry<String, EsField> entry : mapping.entrySet()) {
            pos++; // JDBC is 1-based so we start with 1 here

            String name = entry.getKey();
            // dotted path for sub-fields, e.g. "parent.child"
            name = prefix != null ? prefix + "." + name : name;
            EsField field = entry.getValue();
            DataType type = field.getDataType();

            if (columnMatcher == null || columnMatcher.matcher(name).matches()) {
                rows.add(asList(clusterName,
                        // schema is not supported
                        null,
                        indexName,
                        name,
                        type.jdbcType.getVendorTypeNumber(),
                        type.esType.toUpperCase(Locale.ROOT),
                        type.displaySize,
                        // TODO: is the buffer_length correct?
                        type.size,
                        // no DECIMAL support
                        null,
                        // RADIX  - Determines how numbers returned by COLUMN_SIZE and DECIMAL_DIGITS should be interpreted.
                        // 10 means they represent the number of decimal digits allowed for the column.
                        // 2 means they represent the number of bits allowed for the column.
                        // null means radix is not applicable for the given type.
                        type.isInteger ? Integer.valueOf(10) : type.isRational ? Integer.valueOf(2) : null,
                        // everything is nullable
                        DatabaseMetaData.columnNullable,
                        // no remarks
                        null,
                        // no column def
                        null,
                        // SQL_DATA_TYPE apparently needs to be same as DATA_TYPE except for datetime and interval data types
                        type.jdbcType.getVendorTypeNumber(),
                        // SQL_DATETIME_SUB ?
                        null,
                        // char octet length
                        type.isString() || type == DataType.BINARY ? type.size : null,
                        // position
                        pos,
                        "YES",
                        null,
                        null,
                        null,
                        null,
                        "NO",
                        "NO"
                        ));
            }
            // recurse into object/nested sub-fields with the dotted name as prefix
            if (field.getProperties() != null) {
                fillInRows(clusterName, indexName, field.getProperties(), name, rows, columnMatcher);
            }
        }
    }

    @Override
    public int hashCode() {
        return Objects.hash(indexPattern, columnPattern);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        SysColumns other = (SysColumns) obj;
        return Objects.equals(indexPattern, other.indexPattern)
                && Objects.equals(columnPattern, other.columnPattern);
    }
}

View File

@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
/**
 * REST handler exposing {@link SqlListColumnsAction} at {@code POST /_xpack/sql/columns}.
 */
public class RestSqlListColumnsAction extends BaseRestHandler {
    public RestSqlListColumnsAction(Settings settings, RestController controller) {
        super(settings);
        controller.registerHandler(POST, SqlListColumnsAction.REST_ENDPOINT, this);
    }

    @Override
    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
        // parse the JSON body into a request, honoring the client "mode" URL parameter
        SqlListColumnsRequest listColumnsRequest;
        try (XContentParser parser = request.contentOrSourceParamParser()) {
            listColumnsRequest = SqlListColumnsRequest.fromXContent(parser, AbstractSqlRequest.Mode.fromString(request.param("mode")));
        }

        // execute on the local node and render the response back as XContent
        return channel -> client.executeLocally(SqlListColumnsAction.INSTANCE, listColumnsRequest, new RestToXContentListener<>(channel));
    }

    @Override
    public String getName() {
        return "xpack_sql_list_columns_action";
    }
}

View File

@ -105,8 +105,7 @@ public class SqlPlugin extends Plugin implements ActionPlugin {
return Arrays.asList(new RestSqlQueryAction(settings, restController),
new RestSqlTranslateAction(settings, restController),
new RestSqlClearCursorAction(settings, restController),
new RestSqlListColumnsAction(settings, restController));
new RestSqlClearCursorAction(settings, restController));
}
@Override
@ -117,7 +116,6 @@ public class SqlPlugin extends Plugin implements ActionPlugin {
return Arrays.asList(new ActionHandler<>(SqlQueryAction.INSTANCE, TransportSqlQueryAction.class),
new ActionHandler<>(SqlTranslateAction.INSTANCE, TransportSqlTranslateAction.class),
new ActionHandler<>(SqlClearCursorAction.INSTANCE, TransportSqlClearCursorAction.class),
new ActionHandler<>(SqlListColumnsAction.INSTANCE, TransportSqlListColumnsAction.class));
new ActionHandler<>(SqlClearCursorAction.INSTANCE, TransportSqlClearCursorAction.class));
}
}

View File

@ -1,78 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static org.elasticsearch.common.Strings.hasText;
import static org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest.Mode.JDBC;
/**
 * Transport action behind the SQL "list columns" endpoint: resolves the
 * indices matching the request's table pattern and emits one
 * {@code MetaColumnInfo} per column that matches the column pattern.
 * NOTE(review): removed in this commit, superseded by {@code SYS COLUMNS}.
 */
public class TransportSqlListColumnsAction extends HandledTransportAction<SqlListColumnsRequest, SqlListColumnsResponse> {
private final SqlLicenseChecker sqlLicenseChecker;
private final IndexResolver indexResolver;
@Inject
public TransportSqlListColumnsAction(Settings settings, ThreadPool threadPool,
TransportService transportService, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
SqlLicenseChecker sqlLicenseChecker, IndexResolver indexResolver) {
super(settings, SqlListColumnsAction.NAME, threadPool, transportService, actionFilters, SqlListColumnsRequest::new,
indexNameExpressionResolver);
this.sqlLicenseChecker = sqlLicenseChecker;
this.indexResolver = indexResolver;
}
@Override
protected void doExecute(SqlListColumnsRequest request, ActionListener<SqlListColumnsResponse> listener) {
// License check is mode-aware: JDBC mode has its own licensed feature.
sqlLicenseChecker.checkIfSqlAllowed(request.mode());
// The SQL LIKE table pattern is translated twice: to an index wildcard
// for resolution, and to a Java regex for exact post-filtering. An empty
// pattern means "all indices".
String indexPattern = hasText(request.getTablePattern()) ?
StringUtils.likeToIndexWildcard(request.getTablePattern(), (char) 0) : "*";
String regexPattern = hasText(request.getTablePattern()) ?
StringUtils.likeToJavaPattern(request.getTablePattern(), (char) 0) : null;
// null matcher below means "accept every column".
Pattern columnMatcher = hasText(request.getColumnPattern()) ? Pattern.compile(
StringUtils.likeToJavaPattern(request.getColumnPattern(), (char) 0)) : null;
indexResolver.resolveAsSeparateMappings(indexPattern, regexPattern, ActionListener.wrap(esIndices -> {
List<MetaColumnInfo> columns = new ArrayList<>();
for (EsIndex esIndex : esIndices) {
int pos = 0;
for (Map.Entry<String, EsField> entry : esIndex.mapping().entrySet()) {
String name = entry.getKey();
pos++; // JDBC is 1-based so we start with 1 here
// Position counts every mapped column, even ones filtered out,
// so it reflects the position in the original table.
if (columnMatcher == null || columnMatcher.matcher(name).matches()) {
EsField field = entry.getValue();
if (request.mode() == JDBC) {
// the column size it's actually its precision (based on the Javadocs)
columns.add(new MetaColumnInfo(esIndex.name(), name, field.getDataType().esType,
field.getDataType().jdbcType, field.getPrecision(), pos));
} else {
// Plain mode omits the JDBC type and precision.
columns.add(new MetaColumnInfo(esIndex.name(), name, field.getDataType().esType, pos));
}
}
}
}
listener.onResponse(new SqlListColumnsResponse(columns));
}, listener::onFailure));
}
}

View File

@ -123,5 +123,4 @@ public enum DataType {
public boolean isPrimitive() {
return this != OBJECT && this != NESTED;
}
}
}

View File

@ -7,27 +7,19 @@ package org.elasticsearch.xpack.sql.action;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.sql.plugin.AbstractSqlRequest.Mode;
import org.elasticsearch.xpack.sql.plugin.ColumnInfo;
import org.elasticsearch.xpack.sql.plugin.MetaColumnInfo;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsAction;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsResponse;
import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.sql.JDBCType;
import java.util.List;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class SqlActionIT extends AbstractSqlIntegTestCase {
public void testSqlAction() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test").get());
client().prepareBulk()
@ -54,56 +46,5 @@ public class SqlActionIT extends AbstractSqlIntegTestCase {
assertEquals("baz", response.rows().get(1).get(dataIndex));
assertEquals(43L, response.rows().get(1).get(countIndex));
}
/**
 * Exercises the list-columns action in both JDBC and PLAIN modes against two
 * indices with a known mapping and one index with no mapping at all.
 */
public void testSqlListColumnsAction() throws Exception {
createCompatibleIndex("bar");
createCompatibleIndex("baz");
createIncompatibleIndex("broken");
// JDBC mode: jdbcType and precision are populated per column.
SqlListColumnsResponse response = client().prepareExecute(SqlListColumnsAction.INSTANCE)
.indexPattern("bar").columnPattern("").mode(Mode.JDBC).get();
List<MetaColumnInfo> columns = response.getColumns();
assertThat(columns, hasSize(2));
assertThat(columns, Matchers.contains(
new MetaColumnInfo("bar", "int_field", "integer", JDBCType.INTEGER, 10, 1),
new MetaColumnInfo("bar", "str_field", "text", JDBCType.VARCHAR, Integer.MAX_VALUE, 2)
));
// PLAIN mode: the same columns, but jdbcType is null and precision is 0.
response = client().prepareExecute(SqlListColumnsAction.INSTANCE)
.indexPattern("bar").columnPattern("").mode(Mode.PLAIN).get();
columns = response.getColumns();
assertThat(columns, hasSize(2));
assertThat(columns, Matchers.contains(
new MetaColumnInfo("bar", "int_field", "integer", null, 0, 1),
new MetaColumnInfo("bar", "str_field", "text", null, 0, 2)
));
}
/** Creates an index mapping a text field and an integer field under type "doc". */
private void createCompatibleIndex(String name) throws IOException {
XContentBuilder mapping = jsonBuilder().startObject();
{
mapping.startObject("properties");
{
mapping.startObject("str_field").field("type", "text").endObject();
mapping.startObject("int_field").field("type", "integer").endObject();
}
mapping.endObject();
}
mapping.endObject();
assertAcked(client().admin().indices().prepareCreate(name).addMapping("doc", mapping).get());
}
/** Creates an index with no mapping, i.e. one that exposes no SQL columns. */
private void createIncompatibleIndex(String name) throws IOException {
assertAcked(client().admin().indices().prepareCreate(name).get());
}
/**
 * Filters out internal (dot-prefixed) index names so test results are
 * consistent between secure and unsecure environments.
 */
private static List<String> removeInternal(List<String> list) {
    return list.stream()
            .filter(index -> !index.startsWith("."))
            .collect(Collectors.toList());
}
}

View File

@ -19,8 +19,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsAction;
import org.elasticsearch.xpack.sql.plugin.SqlListColumnsResponse;
import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
import org.elasticsearch.xpack.sql.plugin.SqlQueryResponse;
import org.elasticsearch.xpack.sql.plugin.SqlTranslateAction;
@ -37,9 +35,6 @@ import static org.elasticsearch.license.XPackLicenseStateTests.randomBasicStanda
import static org.elasticsearch.license.XPackLicenseStateTests.randomTrialBasicStandardGoldOrPlatinumMode;
import static org.elasticsearch.license.XPackLicenseStateTests.randomTrialOrPlatinumMode;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase {
@Override
@ -140,38 +135,6 @@ public class SqlLicenseIT extends AbstractLicensesIntegrationTestCase {
assertThat(response.size(), Matchers.equalTo(2L));
}
public void testSqlListColumnsActionLicense() throws Exception {
setupTestIndex();
disableSqlLicensing();
ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class,
() -> client().prepareExecute(SqlListColumnsAction.INSTANCE).columnPattern("").indexPattern("test").get());
assertThat(e.getMessage(), equalTo("current license is non-compliant for [sql]"));
enableSqlLicensing();
SqlListColumnsResponse response = client().prepareExecute(SqlListColumnsAction.INSTANCE).columnPattern("")
.indexPattern("test").get();
assertThat(response.getColumns(), hasSize(2));
assertThat(response.getColumns().get(0).jdbcType(), nullValue());
}
public void testSqlListColumnsJdbcModeLicense() throws Exception {
setupTestIndex();
disableJdbcLicensing();
ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class,
() -> client().prepareExecute(SqlListColumnsAction.INSTANCE).columnPattern("")
.indexPattern("test").mode("jdbc").get());
assertThat(e.getMessage(), equalTo("current license is non-compliant for [jdbc]"));
enableJdbcLicensing();
SqlListColumnsResponse response = client().prepareExecute(SqlListColumnsAction.INSTANCE).columnPattern("")
.indexPattern("test").mode("jdbc").get();
assertThat(response.getColumns(), hasSize(2));
assertThat(response.getColumns().get(0).jdbcType(), notNullValue());
}
public void testSqlTranslateActionLicense() throws Exception {
setupTestIndex();
disableSqlLicensing();

View File

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.plan.logical.command.sys;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.type.TypesTests;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for {@code SYS COLUMNS} row generation. Each generated row has
 * 24 entries following JDBC's {@code DatabaseMetaData.getColumns} result-set
 * layout; the accessors at the bottom pick out the interesting attributes by
 * their position in that layout.
 */
public class SysColumnsTests extends ESTestCase {
public void testSysColumns() {
List<List<?>> rows = new ArrayList<>();
SysColumns.fillInRows("test", "index", TypesTests.loadMapping("mapping-multi-field-variation.json", true), null, rows, null);
// 15 columns (incl. sub-fields) in the mapping, 24 attributes per row.
assertEquals(15, rows.size());
assertEquals(24, rows.get(0).size());
List<?> row = rows.get(0);
assertEquals("bool", name(row));
assertEquals(Types.BOOLEAN, sqlType(row));
// booleans: no numeric radix, single-byte storage
assertEquals(null, radix(row));
assertEquals(1, bufferLength(row));
row = rows.get(1);
assertEquals("int", name(row));
assertEquals(Types.INTEGER, sqlType(row));
// integers: base-10 radix, 4-byte storage
assertEquals(10, radix(row));
assertEquals(4, bufferLength(row));
row = rows.get(2);
assertEquals("text", name(row));
assertEquals(Types.VARCHAR, sqlType(row));
// text: unbounded, so buffer length is Integer.MAX_VALUE
assertEquals(null, radix(row));
assertEquals(Integer.MAX_VALUE, bufferLength(row));
row = rows.get(6);
assertEquals("some.dotted", name(row));
// object fields surface as STRUCT with a -1 (unknown) buffer length
assertEquals(Types.STRUCT, sqlType(row));
assertEquals(null, radix(row));
assertEquals(-1, bufferLength(row));
row = rows.get(14);
assertEquals("some.ambiguous.normalized", name(row));
assertEquals(Types.VARCHAR, sqlType(row));
assertEquals(null, radix(row));
assertEquals(Integer.MAX_VALUE, bufferLength(row));
}
// COLUMN_NAME (position 4, index 3, in the getColumns layout)
private static Object name(List<?> list) {
return list.get(3);
}
// DATA_TYPE — a java.sql.Types constant
private static Object sqlType(List<?> list) {
return list.get(4);
}
// BUFFER_LENGTH
private static Object bufferLength(List<?> list) {
return list.get(7);
}
// NUM_PREC_RADIX
private static Object radix(List<?> list) {
return list.get(9);
}
}

View File

@ -6,13 +6,16 @@
package org.elasticsearch.xpack.sql.plan.logical.command.sys;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.IndexResolution;
import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.parser.SqlParser;
import org.elasticsearch.xpack.sql.plan.logical.command.Command;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.type.EsField;
import org.elasticsearch.xpack.sql.type.TypesTests;
import org.joda.time.DateTimeZone;
@ -21,20 +24,37 @@ import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SysParserTests extends ESTestCase {
private final SqlParser parser = new SqlParser(DateTimeZone.UTC);
private final Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json", true);
private Command sql(String sql) {
Map<String, EsField> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json");
@SuppressWarnings({ "rawtypes", "unchecked" })
private Tuple<Command, SqlSession> sql(String sql) {
EsIndex test = new EsIndex("test", mapping);
Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), DateTimeZone.UTC);
return (Command) analyzer.analyze(parser.createStatement(sql), true);
Command cmd = (Command) analyzer.analyze(parser.createStatement(sql), true);
IndexResolver resolver = mock(IndexResolver.class);
when(resolver.clusterName()).thenReturn("cluster");
doAnswer(invocation -> {
((ActionListener) invocation.getArguments()[2]).onResponse(singletonList(test));
return Void.TYPE;
}).when(resolver).resolveAsSeparateMappings(any(), any(), any());
SqlSession session = new SqlSession(null, null, null, resolver, null, null, null);
return new Tuple<>(cmd, session);
}
public void testSysTypes() throws Exception {
Command cmd = sql("SYS TYPES");
Command cmd = sql("SYS TYPES").v1();
List<String> names = asList("BYTE", "SHORT", "INTEGER", "LONG", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", "KEYWORD", "TEXT",
"DATE", "BINARY", "NULL", "UNSUPPORTED", "OBJECT", "NESTED", "BOOLEAN");
@ -50,4 +70,60 @@ public class SysParserTests extends ESTestCase {
}, ex -> fail(ex.getMessage())));
}
}
// NOTE(review): these four methods previously declared an unused
// "String commandVariation" parameter. Test methods must be public, void and
// zero-arg for the JUnit runner to execute them — with the stray parameter
// they would never run — so the parameter is removed.

/** {@code SYS COLUMNS} with no table or column pattern. */
public void testSysColsNoArgs() throws Exception {
runSysColumns("SYS COLUMNS");
}

/** {@code SYS COLUMNS} restricted by a table pattern only. */
public void testSysColsTableOnlyPattern() throws Exception {
runSysColumns("SYS COLUMNS TABLES LIKE 'test'");
}

/** {@code SYS COLUMNS} restricted by a column pattern only. */
public void testSysColsColOnlyPattern() throws Exception {
runSysColumns("SYS COLUMNS LIKE '%'");
}

/** {@code SYS COLUMNS} restricted by both table and column patterns. */
public void testSysColsTableAndColsPattern() throws Exception {
runSysColumns("SYS COLUMNS TABLES LIKE 'test' LIKE '%'");
}
/**
 * Runs the given SYS COLUMNS variation against the mocked index resolver and
 * verifies that every mapped field (including sub-fields) is reported, in
 * mapping order, with the expected catalog/schema/table metadata.
 */
private void runSysColumns(String commandVariation) throws Exception {
Tuple<Command, SqlSession> sql = sql(commandVariation);
// Expected COLUMN_NAME values, in mapping iteration order (21 entries).
List<String> names = asList("bool",
"int",
"text",
"keyword",
"unsupported",
"some",
"some.dotted",
"some.dotted.field",
"some.string",
"some.string.normalized",
"some.string.typical",
"some.ambiguous",
"some.ambiguous.one",
"some.ambiguous.two",
"some.ambiguous.normalized",
"dep",
"dep.dep_name",
"dep.dep_id",
"dep.dep_id.keyword",
"dep.end_date",
"dep.start_date");
sql.v1().execute(sql.v2(), ActionListener.wrap(r -> {
// 24 attributes per row (JDBC getColumns layout), one row per field.
assertEquals(24, r.columnCount());
assertEquals(21, r.size());
for (int i = 0; i < r.size(); i++) {
assertEquals("cluster", r.column(0)); // TABLE_CAT = mocked cluster name
assertNull(r.column(1)); // TABLE_SCHEM
assertEquals("test", r.column(2)); // TABLE_NAME
assertEquals(names.get(i), r.column(3)); // COLUMN_NAME
r.advanceRow();
}
}, ex -> fail(ex.getMessage())));
}
}

View File

@ -1,16 +0,0 @@
{
"xpack.sql.columns": {
"documentation": "Returns a list of columns supported by SQL for the current user",
"methods": [ "POST" ],
"url": {
"path": "/_xpack/sql/columns",
"paths": [ "/_xpack/sql/columns" ],
"parts": {},
"params": {}
},
"body": {
"description" : "Specify the table pattern in the `table_pattern` and the column patter in `column_pattern` element.",
"required" : true
}
}
}

View File

@ -118,14 +118,3 @@ setup:
- match: { indices.test.total.search.open_contexts: 0 }
---
"Check list of columns":
- do:
xpack.sql.columns:
body:
table_pattern: "t%"
column_pattern: ""
- length: { columns: 2 }
- match: { columns.0.name: 'int' }
- match: { columns.1.name: 'str' }

View File

@ -3,23 +3,23 @@ read_all:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: test
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
- names: bort
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
read_something_else:
cluster:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: something_that_isnt_test
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
read_test_a:
cluster:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: test
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
field_security:
grant: [a]
@ -28,7 +28,7 @@ read_test_a_and_b:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: test
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
field_security:
grant: ["*"]
except: [c]
@ -38,7 +38,7 @@ read_test_without_c_3:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: test
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]
query: |
{
"bool": {
@ -57,4 +57,4 @@ read_bort:
- "cluster:monitor/main" # Used by JDBC's MetaData
indices:
- names: bort
privileges: [read, "indices:admin/get", "indices:admin/sql/columns"]
privileges: [read, "indices:admin/get"]

View File

@ -5,7 +5,6 @@
*/
package org.elasticsearch.xpack.qa.sql.security;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.io.PathUtils;
@ -28,7 +27,6 @@ import static org.elasticsearch.xpack.qa.sql.jdbc.JdbcAssert.assertResultSets;
import static org.elasticsearch.xpack.qa.sql.jdbc.JdbcIntegrationTestCase.elasticsearchAddress;
import static org.elasticsearch.xpack.qa.sql.jdbc.JdbcIntegrationTestCase.randomKnownTimeZone;
import static org.elasticsearch.xpack.qa.sql.security.RestSqlIT.SSL_ENABLED;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
public class JdbcSecurityIT extends SqlSecurityTestCase {
@ -264,20 +262,12 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
con -> con.getMetaData().getColumns("%", "%", "%t", "%"),
"full_access",
con -> con.getMetaData().getColumns("%", "%", "%", "%"));
new AuditLogAsserter()
.expect(true, GetIndexAction.NAME, "test_admin", contains("bort", "test"))
.expect(true, GetIndexAction.NAME, "full_access", contains("bort", "test"))
.assertLogs();
}
public void testMetaDataGetColumnsWithNoAccess() throws Exception {
createUser("no_access", "read_nothing");
expectForbidden("no_access", con -> con.getMetaData().getColumns("%", "%", "%", "%"));
new AuditLogAsserter()
// TODO figure out why this generates *no* logs
// .expect(false, GetIndexAction.NAME, "no_access", contains("bort", "test"))
.assertLogs();
}
public void testMetaDataGetColumnsWithWrongAccess() throws Exception {
@ -287,10 +277,6 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
con -> con.getMetaData().getColumns("%", "%", "not_created", "%"),
"wrong_access",
con -> con.getMetaData().getColumns("%", "%", "test", "%"));
new AuditLogAsserter()
.expect(true, GetIndexAction.NAME, "test_admin", contains("*", "-*"))
.expect(true, GetIndexAction.NAME, "wrong_access", contains("*", "-*"))
.assertLogs();
}
public void testMetaDataGetColumnsSingleFieldGranted() throws Exception {
@ -300,10 +286,6 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
con -> con.getMetaData().getColumns("%", "%", "test", "a"),
"only_a",
con -> con.getMetaData().getColumns("%", "%", "test", "%"));
new AuditLogAsserter()
.expect(true, GetIndexAction.NAME, "test_admin", contains("test"))
.expect(true, GetIndexAction.NAME, "only_a", contains("test"))
.assertLogs();
}
public void testMetaDataGetColumnsSingleFieldExcepted() throws Exception {
@ -322,9 +304,6 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
assertEquals("b", columnName);
assertFalse(result.next());
}
new AuditLogAsserter()
.expect(true, GetIndexAction.NAME, "not_c", contains("test"))
.assertLogs();
}
public void testMetaDataGetColumnsDocumentExcluded() throws Exception {
@ -334,9 +313,5 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
con -> con.getMetaData().getColumns("%", "%", "test", "%"),
"no_3s",
con -> con.getMetaData().getColumns("%", "%", "test", "%"));
new AuditLogAsserter()
.expect(true, GetIndexAction.NAME, "test_admin", contains("test"))
.expect(true, GetIndexAction.NAME, "no_3s", contains("test"))
.assertLogs();
}
}
}

View File

@ -123,16 +123,16 @@ public class JdbcAssert {
int type = metaData.getColumnType(column);
String msg = format(Locale.ROOT, "Different result for column [" + metaData.getColumnName(column) + "], entry [" + count
+ "]; " + "expected %s but was %s", expectedObject, actualObject);
String msg = format(Locale.ROOT, "Different result for column [" + metaData.getColumnName(column) + "], "
+ "entry [" + (count + 1) + "]");
// handle nulls first
if (expectedObject == null || actualObject == null) {
assertEquals(expectedObject, actualObject);
assertEquals(msg, expectedObject, actualObject);
}
// then timestamp
else if (type == Types.TIMESTAMP || type == Types.TIMESTAMP_WITH_TIMEZONE) {
assertEquals(getTime(expected, column), getTime(actual, column));
assertEquals(msg, getTime(expected, column), getTime(actual, column));
}
// and floats/doubles
else if (type == Types.DOUBLE) {

View File

@ -23,23 +23,28 @@ CREATE TABLE mock (
IS_AUTOINCREMENT VARCHAR,
IS_GENERATEDCOLUMN VARCHAR
) AS
SELECT '', 'test1', 'name', 12, 'VARCHAR', 2147483647, null, null, null,
SELECT null, 'test1', 'name', 12, 'TEXT', 0, null, null, null,
1, -- columnNullable
null, null, null, null, null, 1, 'YES', null, null, null, null, '', ''
null, null, 12, null, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT '', 'test2', 'date', 93, 'TIMESTAMP', 19, null, null, null,
SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, null, null, null,
1, -- columnNullable
null, null, null, null, null, 1, 'YES', null, null, null, null, '', ''
null, null, 12, null, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT '', 'test2', 'float', 7, 'REAL', 7, null, null, 2,
SELECT null, 'test2', 'date', 93, 'DATE', 20, null, null, null,
1, -- columnNullable
null, null, null, null, null, 2, 'YES', null, null, null, null, '', ''
null, null, 93, null, null, 1, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT '', 'test2', 'number', -5, 'BIGINT', 19, null, null, 10,
SELECT null, 'test2', 'float', 7, 'FLOAT', 15, null, null, 2,
1, -- columnNullable
null, null, null, null, null, 3, 'YES', null, null, null, null, '', ''
null, null, 7, null, null, 2, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
UNION ALL
SELECT null, 'test2', 'number', -5, 'LONG', 20, null, null, 10,
1, -- columnNullable
null, null, -5, null, null, 3, 'YES', null, null, null, null, 'NO', 'NO'
FROM DUAL
;