Get jdbc tests passing

`gradle check -xforbiddenPatterns` now passes in jdbc.

This makes running the embedded HTTP server slightly more difficult,
you now have to add the following to your jvm arguments.
```
-ea -Dtests.rest.cluster=localhost:9200 -Dtests.embed.sql=true -Dtests.security.manager=false
```

Depending on your environment the embedded jdbc connection may give
spurious failures that look like:
```
org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException: RemoteTransportException[[node-0][127.0.0.1:9300][indices:data/read/search]]; nested: SearchPhaseExecutionException[]; nested: GeneralScriptException[Failed to compile inline script [( params.a0 > params.v0 ) && ( params.a1 > params.v1 )] using lang [painless]]; nested: CircuitBreakingException[[script] Too many dynamic script compilations within one minute, max: [15/min]; please use on-disk, indexed, or scripts with parameters instead; this limit can be changed by the [script.max_compilations_per_minute] setting];
...
Caused by: Failed to execute phase [fetch],
..
Caused by: GeneralScriptException[Failed to compile inline script [( params.a0 > params.v0 ) && ( params.a1 > params.v1 )] using lang [painless]]; nested: CircuitBreakingException[[script] Too many dynamic script compilations within one minute, max: [15/min]; please use on-disk, indexed, or scripts with parameters instead; this limit can be changed by the [script.max_compilations_per_minute] setting];
...
Caused by: CircuitBreakingException[[script] Too many dynamic script compilations within one minute, max: [15/min]; please use on-disk, indexed, or scripts with parameters instead; this limit can be changed by the [script.max_compilations_per_minute] setting]
```

`gradle check` works around this by setting `script.max_compilations_per_minute`
to `1000`.

Another change is that we no longer support loading the test data by
uncommenting some code. Instead we load the test data into Elasticsearch
before the first test and we delete it after the last test. This is
so that tests that require different test data can interoperate with
each other. The spec tests all use the same test data but the metadata
tests do not.

Original commit: elastic/x-pack-elasticsearch@8b8f684ac1
This commit is contained in:
Nik Everett 2017-07-05 13:38:17 -04:00
parent 18bc094dbc
commit bc2f0fe7ff
38 changed files with 866 additions and 1025 deletions

View File

@ -128,3 +128,20 @@ task runServer(type: RunTask) {
setting 'xpack.watcher.enabled', 'false' setting 'xpack.watcher.enabled', 'false'
run.dependsOn this run.dependsOn this
} }
// Allow for com.sun.net.httpserver.* usage for testing
eclipse {
classpath.file {
whenMerged { cp ->
def con = entries.find { e ->
e.kind == "con" && e.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER")
}
con.accessRules.add(new org.gradle.plugins.ide.eclipse.model.AccessRule(
"accessible", "com/sun/net/httpserver/*"))
}
}
}
forbiddenApisTest {
bundledSignatures -= 'jdk-non-portable'
bundledSignatures += 'jdk-internal'
}

View File

@ -9,11 +9,12 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.sql.cli.net.protocol.Response; import org.elasticsearch.xpack.sql.cli.net.protocol.Response;
import org.elasticsearch.xpack.sql.test.server.ProtoHttpServer; import org.elasticsearch.xpack.sql.test.server.ProtoHttpServer;
/**
* Internal server used for testing without starting a new Elasticsearch instance.
*/
public class CliHttpServer extends ProtoHttpServer<Response> { public class CliHttpServer extends ProtoHttpServer<Response> {
// NOCOMMIT replace me with the same hack that JdbcIntegrationTestCase uses
public CliHttpServer(Client client) { public CliHttpServer(Client client) {
super(client, new CliProtoHandler(client), "/cli/", "sql/"); super(client, new CliProtoHandler(client), "/cli/");
} }
@Override @Override

View File

@ -44,15 +44,12 @@ dependencies {
compile project(':x-pack-elasticsearch:sql:net-client') compile project(':x-pack-elasticsearch:sql:net-client')
compile project(':x-pack-elasticsearch:sql:jdbc-proto') compile project(':x-pack-elasticsearch:sql:jdbc-proto')
testCompile project(":x-pack-elasticsearch:transport-client") // NOCOMMIT probably can remove this testCompile project(path: ':client:transport', configuration: 'runtime')
testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts') testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts')
testCompile project(':x-pack-elasticsearch:sql:test-utils') testCompile project(':x-pack-elasticsearch:sql:test-utils')
testCompile "net.sourceforge.csvjdbc:csvjdbc:1.0.31"
// Used by the hack to run InternalTestCluster if not running against a gradle-started cluster.
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
testRuntime "com.h2database:h2:1.4.194" testRuntime "com.h2database:h2:1.4.194"
testRuntime "net.sourceforge.csvjdbc:csvjdbc:1.0.31"
} }
dependencyLicenses { dependencyLicenses {
@ -80,6 +77,7 @@ integTestCluster {
setting 'xpack.monitoring.enabled', 'false' setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false' setting 'xpack.ml.enabled', 'false'
setting 'xpack.watcher.enabled', 'false' setting 'xpack.watcher.enabled', 'false'
setting 'script.max_compilations_per_minute', '1000'
} }
task run(type: RunTask) { task run(type: RunTask) {
@ -91,4 +89,22 @@ task run(type: RunTask) {
setting 'xpack.monitoring.enabled', 'false' setting 'xpack.monitoring.enabled', 'false'
setting 'xpack.ml.enabled', 'false' setting 'xpack.ml.enabled', 'false'
setting 'xpack.watcher.enabled', 'false' setting 'xpack.watcher.enabled', 'false'
setting 'script.max_compilations_per_minute', '1000'
}
// Allow for com.sun.net.httpserver.* usage for testing
eclipse {
classpath.file {
whenMerged { cp ->
def con = entries.find { e ->
e.kind == "con" && e.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER")
}
con.accessRules.add(new org.gradle.plugins.ide.eclipse.model.AccessRule(
"accessible", "com/sun/net/httpserver/*"))
}
}
}
forbiddenApisTest {
bundledSignatures -= 'jdk-non-portable'
bundledSignatures += 'jdk-internal'
} }

View File

@ -433,11 +433,12 @@ public class JdbcConnection implements Connection, JdbcWrapper {
return userName; return userName;
} }
int esInfoMajorVersion() throws SQLException { // NOCOMMIT should this be one of those wrapped things?
public int esInfoMajorVersion() throws SQLException {
return client.serverInfo().majorVersion; return client.serverInfo().majorVersion;
} }
int esInfoMinorVersion() throws SQLException { public int esInfoMinorVersion() throws SQLException {
return client.serverInfo().minorVersion; return client.serverInfo().minorVersion;
} }
} }

View File

@ -1,129 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.sql.jdbc.framework.TestUtils.index;
/**
* Test the jdbc driver behavior and the connection to Elasticsearch.
*/
public class BasicsIT extends SpecBaseIntegrationTestCase {
// NOCOMMIT these might should move into their own test or be deleted entirely
// public void test01Ping() throws Exception {
// assertThat(client.ping((int) TimeUnit.SECONDS.toMillis(5)), equalTo(true));
// }
//
// public void testInfoAction() throws Exception {
// InfoResponse esInfo = client.serverInfo();
// assertThat(esInfo, notNullValue());
// assertThat(esInfo.cluster, is("elasticsearch"));
// assertThat(esInfo.node, not(isEmptyOrNullString()));
// assertThat(esInfo.versionHash, not(isEmptyOrNullString()));
// assertThat(esInfo.versionString, startsWith("5."));
// assertThat(esInfo.majorVersion, is(5));
// //assertThat(esInfo.minorVersion(), is(0));
// }
//
// public void testInfoTable() throws Exception {
// List<String> tables = client.metaInfoTables("emp*");
// assertThat(tables.size(), greaterThanOrEqualTo(1));
// assertThat(tables, hasItem("emp.emp"));
// }
//
// public void testInfoColumn() throws Exception {
// List<MetaColumnInfo> info = client.metaInfoColumns("em*", null);
// for (MetaColumnInfo i : info) {
// // NOCOMMIT test these
// logger.info(i);
// }
// }
public void testConnectionProperties() throws SQLException {
j.consume(c -> {
assertFalse(c.isClosed());
assertTrue(c.isReadOnly());
});
}
/**
 * Tests that we report no transaction isolation and throw sensible errors if you ask for any.
*/
public void testTransactionIsolation() throws Exception {
j.consume(c -> {
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
SQLException e = expectThrows(SQLException.class, () -> c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE));
assertEquals("Transactions not supported", e.getMessage());
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
});
}
public void testShowTablesEmpty() throws Exception {
List<Map<String, Object>> results = j.queryForList("SHOW TABLES");
assertEquals(emptyList(), results);
}
public void testShowTablesWithAnIndex() throws Exception {
index("test", builder -> builder.field("name", "bob"));
List<Map<String, Object>> results = j.queryForList("SHOW TABLES");
List<Map<String, Object>> expected = new ArrayList<>();
Map<String, Object> index = new HashMap<>();
index.put("index", "test");
index.put("type", "doc");
expected.add(index);
assertEquals(expected, results);
}
public void testShowTablesWithManyIndices() throws Exception {
int indices = between(2, 20);
for (int i = 0; i < indices; i++) {
index("test" + i, builder -> builder.field("name", "bob"));
}
List<Map<String, Object>> results = j.queryForList("SHOW TABLES");
results.sort(Comparator.comparing(map -> map.get("index").toString()));
List<Map<String, Object>> expected = new ArrayList<>();
for (int i = 0; i < indices; i++) {
Map<String, Object> index = new HashMap<>();
index.put("index", "test" + i);
index.put("type", "doc");
expected.add(index);
}
expected.sort(Comparator.comparing(map -> map.get("index").toString()));
assertEquals(expected, results);
}
public void testBasicSelect() throws Exception {
index("test", builder -> builder.field("name", "bob"));
List<Map<String, Object>> results = j.queryForList("SELECT * from test.doc");
assertEquals(singletonList(singletonMap("name", "bob")), results);
}
public void testSelectFromMissingTable() throws Exception {
SQLException e = expectThrows(SQLException.class, () -> j.queryForList("SELECT * from test.doc"));
assertEquals("line 1:15: Cannot resolve index test", e.getMessage());
}
public void testSelectFromMissingType() throws Exception {
index("test", builder -> builder.field("name", "bob"));
SQLException e = expectThrows(SQLException.class, () -> j.queryForList("SELECT * from test.notdoc"));
assertEquals("line 1:15: Cannot resolve type notdoc in index test", e.getMessage());
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.Version;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConnection;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Test the jdbc {@link Connection} implementation.
*/
public class ConnectionIT extends JdbcIntegrationTestCase {
public void testConnectionProperties() throws SQLException {
try (Connection c = esJdbc()) {
assertFalse(c.isClosed());
assertTrue(c.isReadOnly());
assertEquals(Version.CURRENT.major, ((JdbcConnection) c).esInfoMajorVersion());
assertEquals(Version.CURRENT.minor, ((JdbcConnection) c).esInfoMinorVersion());
}
}
public void testIsValid() throws SQLException {
try (Connection c = esJdbc()) {
assertTrue(c.isValid(10));
}
}
/**
 * Tests that we report no transaction isolation and throw sensible errors if you ask for any.
*/
public void testTransactionIsolation() throws Exception {
try (Connection c = esJdbc()) {
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
SQLException e = expectThrows(SQLException.class, () -> c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE));
assertEquals("Transactions not supported", e.getMessage());
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
}
}
}

View File

@ -0,0 +1,94 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import java.nio.file.Path;
import java.util.List;
import java.util.Locale;
import static java.lang.String.format;
/**
* Tests comparing sql queries executed against our jdbc client
* with hard coded result sets.
*/
public class CsvSpecIT extends SpecBaseIntegrationTestCase {
private final CsvTestCase testCase;
@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTNG) // NOCOMMIT are we sure?!
public static List<Object[]> readScriptSpec() throws Exception {
CsvSpecParser parser = new CsvSpecParser();
return CollectionUtils.combine(
readScriptSpec("/command.csv-spec", parser),
readScriptSpec("/fulltext.csv-spec", parser));
}
public CsvSpecIT(String groupName, String testName, Integer lineNumber, Path source, CsvTestCase testCase) {
super(groupName, testName, lineNumber, source);
this.testCase = testCase;
}
public void test() throws Throwable {
try {
assertMatchesCsv(testCase.query, testName, testCase.expectedResults);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
} catch (Throwable th) {
throw reworkException(th);
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber,
testCase.query, th.getMessage());
}
private static class CsvSpecParser implements Parser {
private final StringBuilder data = new StringBuilder();
private CsvTestCase testCase;
@Override
public Object parse(String line) {
// beginning of the section
if (testCase == null) {
// pick up the query
testCase = new CsvTestCase();
testCase.query = line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
else {
// read CSV header
// if (fragment.columnNames == null) {
// fragment.columnNames = line;
// }
// read data
if (line.startsWith(";")) {
testCase.expectedResults = data.toString();
// clean-up and emit
CsvTestCase result = testCase;
testCase = null;
data.setLength(0);
return result;
}
else {
data.append(line);
data.append("\r\n");
}
}
return null;
}
}
private static class CsvTestCase {
String query;
String expectedResults;
}
}

View File

@ -5,196 +5,83 @@
*/ */
package org.elasticsearch.xpack.sql.jdbc; package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase; import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import java.sql.Connection;
import java.sql.DatabaseMetaData; import java.sql.DatabaseMetaData;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Types;
import static org.hamcrest.Matchers.startsWith; import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
/** /**
* Tests for our implementation of {@link DatabaseMetaData}. * Tests for our implementation of {@link DatabaseMetaData}.
*/ */
public class DatabaseMetaDataIT extends SpecBaseIntegrationTestCase { public class DatabaseMetaDataIT extends JdbcIntegrationTestCase {
/** /**
* We do not support procedures so we return an empty set for {@link DatabaseMetaData#getProcedures(String, String, String)}. * We do not support procedures so we return an empty set for
* {@link DatabaseMetaData#getProcedures(String, String, String)}.
*/ */
public void testMetadataGetProcedures() throws Exception { public void testGetProcedures() throws Exception {
j.consume(c -> { try (Connection h2 = LocalH2.anonymousDb();
DatabaseMetaData metaData = c.getMetaData(); Connection es = esJdbc()) {
ResultSet results = metaData.getProcedures( h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_metadata_get_procedures.sql'");
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5));
ResultSetMetaData meta = results.getMetaData();
int i = 1;
assertColumn("PROCEDURE_CAT", "VARCHAR", meta, i++);
assertColumn("PROCEDURE_SCHEM", "VARCHAR", meta, i++);
assertColumn("PROCEDURE_NAME", "VARCHAR", meta, i++);
assertColumn("NUM_INPUT_PARAMS", "INTEGER", meta, i++);
assertColumn("NUM_OUTPUT_PARAMS", "INTEGER", meta, i++);
assertColumn("NUM_RESULT_SETS", "INTEGER", meta, i++);
assertColumn("REMARKS", "VARCHAR", meta, i++);
assertColumn("PROCEDURE_TYPE", "SMALLINT", meta, i++);
assertColumn("SPECIFIC_NAME", "VARCHAR", meta, i++);
assertEquals(i - 1, meta.getColumnCount());
assertFalse(results.next()); ResultSet expected = h2.createStatement().executeQuery("SELECT * FROM mock");
}); assertResultSets(expected, es.getMetaData().getProcedures(
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5)));
}
} }
public void testMetadataGetProcedureColumns() throws Exception { /**
j.consume(c -> { * We do not support procedures so we return an empty set for
DatabaseMetaData metaData = c.getMetaData(); * {@link DatabaseMetaData#getProcedureColumns(String, String, String, String)}.
ResultSet results = metaData.getProcedureColumns( */
randomBoolean() ? null : randomAlphaOfLength(5), public void testGetProcedureColumns() throws Exception {
randomBoolean() ? null : randomAlphaOfLength(5), try (Connection h2 = LocalH2.anonymousDb();
randomBoolean() ? null : randomAlphaOfLength(5), Connection es = esJdbc()) {
randomBoolean() ? null : randomAlphaOfLength(5)); h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_metadata_get_procedure_columns.sql'");
ResultSetMetaData meta = results.getMetaData();
int i = 1;
assertColumn("PROCEDURE_CAT", "VARCHAR", meta, i++);
assertColumn("PROCEDURE_SCHEM", "VARCHAR", meta, i++);
assertColumn("PROCEDURE_NAME", "VARCHAR", meta, i++);
assertColumn("COLUMN_NAME", "VARCHAR", meta, i++);
assertColumn("COLUMN_TYPE", "SMALLINT", meta, i++);
assertColumn("DATA_TYPE", "INTEGER", meta, i++);
assertColumn("TYPE_NAME", "VARCHAR", meta, i++);
assertColumn("PRECISION", "INTEGER", meta, i++);
assertColumn("LENGTH", "INTEGER", meta, i++);
assertColumn("SCALE", "SMALLINT", meta, i++);
assertColumn("RADIX", "SMALLINT", meta, i++);
assertColumn("NULLABLE", "SMALLINT", meta, i++);
assertColumn("REMARKS", "VARCHAR", meta, i++);
assertColumn("COLUMN_DEF", "VARCHAR", meta, i++);
assertColumn("SQL_DATA_TYPE", "INTEGER", meta, i++);
assertColumn("SQL_DATETIME_SUB", "INTEGER", meta, i++);
assertColumn("CHAR_OCTET_LENGTH", "INTEGER", meta, i++);
assertColumn("ORDINAL_POSITION", "INTEGER", meta, i++);
assertColumn("IS_NULLABLE", "VARCHAR", meta, i++);
assertColumn("SPECIFIC_NAME", "VARCHAR", meta, i++);
assertEquals(i - 1, meta.getColumnCount());
assertFalse(results.next()); ResultSet expected = h2.createStatement().executeQuery("SELECT * FROM mock");
}); assertResultSets(expected, es.getMetaData().getProcedureColumns(
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5),
randomBoolean() ? null : randomAlphaOfLength(5)));
}
} }
public void testMetadataGetTables() throws Exception { public void testGetTables() throws Exception {
index("test", body -> body.field("name", "bob")); index("test", body -> body.field("name", "bob"));
j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
ResultSet results = metaData.getTables("%", "%", "%", null);
ResultSetMetaData meta = results.getMetaData();
int i = 1;
assertColumn("TABLE_CAT", "VARCHAR", meta, i++);
assertColumn("TABLE_SCHEM", "VARCHAR", meta, i++);
assertColumn("TABLE_NAME", "VARCHAR", meta, i++);
assertColumn("TABLE_TYPE", "VARCHAR", meta, i++);
assertColumn("REMARKS", "VARCHAR", meta, i++);
assertColumn("TYPE_CAT", "VARCHAR", meta, i++);
assertColumn("TYPE_SCHEM", "VARCHAR", meta, i++);
assertColumn("TYPE_NAME", "VARCHAR", meta, i++);
assertColumn("SELF_REFERENCING_COL_NAME", "VARCHAR", meta, i++);
assertColumn("REF_GENERATION", "VARCHAR", meta, i++);
assertEquals(i - 1, meta.getColumnCount());
assertTrue(results.next()); try (Connection h2 = LocalH2.anonymousDb();
i = 1; Connection es = esJdbc()) {
assertThat(results.getString(i++), startsWith("x-pack-elasticsearch_sql_jdbc_")); h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_metadata_get_tables.sql'");
assertEquals("", results.getString(i++));
assertEquals("test.doc", results.getString(i++));
assertEquals("TABLE", results.getString(i++));
assertEquals("", results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertFalse(results.next());
results = metaData.getTables("%", "%", "te%", null); CheckedSupplier<ResultSet, SQLException> expected = () ->
assertTrue(results.next()); h2.createStatement().executeQuery("SELECT '" + clusterName() + "' AS TABLE_CAT, * FROM mock");
assertEquals("test.doc", results.getString(3)); assertResultSets(expected.get(), es.getMetaData().getTables("%", "%", "%", null));
assertFalse(results.next()); assertResultSets(expected.get(), es.getMetaData().getTables("%", "%", "te%", null));
// NOCOMMIT with a wildcard type is broken:
results = metaData.getTables("%", "%", "test.d%", null); // assertResultSets(expected.get(), es.getMetaData().getTables("%", "%", "test.d%", null));
assertTrue(results.next()); }
assertEquals("test.doc", results.getString(3));
assertFalse(results.next());
});
} }
public void testMetadataColumns() throws Exception { public void testColumns() throws Exception {
index("test", body -> body.field("name", "bob")); index("test", body -> body.field("name", "bob"));
j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
ResultSet results = metaData.getColumns("%", "%", "%", null);
ResultSetMetaData meta = results.getMetaData();
int i = 1;
assertColumn("TABLE_CAT", "VARCHAR", meta, i++);
assertColumn("TABLE_SCHEM", "VARCHAR", meta, i++);
assertColumn("TABLE_NAME", "VARCHAR", meta, i++);
assertColumn("COLUMN_NAME", "VARCHAR", meta, i++);
assertColumn("DATA_TYPE", "INTEGER", meta, i++);
assertColumn("TYPE_NAME", "VARCHAR", meta, i++);
assertColumn("COLUMN_SIZE", "INTEGER", meta, i++);
assertColumn("BUFFER_LENGTH", "NULL", meta, i++);
assertColumn("DECIMAL_DIGITS", "INTEGER", meta, i++);
assertColumn("NUM_PREC_RADIX", "INTEGER", meta, i++);
assertColumn("NULLABLE", "INTEGER", meta, i++);
assertColumn("REMARKS", "VARCHAR", meta, i++);
assertColumn("COLUMN_DEF", "VARCHAR", meta, i++);
assertColumn("SQL_DATA_TYPE", "INTEGER", meta, i++);
assertColumn("SQL_DATETIME_SUB", "INTEGER", meta, i++);
assertColumn("CHAR_OCTET_LENGTH", "INTEGER", meta, i++);
assertColumn("ORDINAL_POSITION", "INTEGER", meta, i++);
assertColumn("IS_NULLABLE", "VARCHAR", meta, i++);
assertColumn("SCOPE_CATALOG", "VARCHAR", meta, i++);
assertColumn("SCOPE_SCHEMA", "VARCHAR", meta, i++);
assertColumn("SCOPE_TABLE", "VARCHAR", meta, i++);
assertColumn("SOURCE_DATA_TYPE", "SMALLINT", meta, i++);
assertColumn("IS_AUTOINCREMENT", "VARCHAR", meta, i++);
assertColumn("IS_GENERATEDCOLUMN", "VARCHAR", meta, i++);
assertEquals(i - 1, meta.getColumnCount());
assertTrue(results.next()); try (Connection h2 = LocalH2.anonymousDb();
i = 1; Connection es = esJdbc()) {
assertThat(results.getString(i++), startsWith("x-pack-elasticsearch_sql_jdbc_")); h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_metadata_get_columns.sql'");
assertEquals("", results.getString(i++));
assertEquals("test.doc", results.getString(i++));
assertEquals("name", results.getString(i++));
assertEquals(Types.VARCHAR, results.getInt(i++));
assertEquals("VARCHAR", results.getString(i++));
assertEquals(1, results.getInt(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(10, results.getInt(i++)); // NOCOMMIT 10 seems wrong to hard code for stuff like strings
// NOCOMMIT I think it'd be more correct to return DatabaseMetaData.columnNullable because all fields are nullable in es
assertEquals(DatabaseMetaData.columnNullableUnknown, results.getInt(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(1, results.getInt(i++));
assertEquals("", results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals(null, results.getString(i++));
assertEquals("", results.getString(i++));
assertEquals("", results.getString(i++));
assertFalse(results.next());
});
// NOCOMMIT add some more columns and test that. ResultSet expected = h2.createStatement().executeQuery("SELECT '" + clusterName() + "' AS TABLE_CAT, * FROM mock");
} assertResultSets(expected, es.getMetaData().getColumns("%", "%", "%", null));
}
private static void assertColumn(String name, String type, ResultSetMetaData meta, int index) throws SQLException { // NOCOMMIT add some more tables and more columns and test that.
assertEquals(name, meta.getColumnName(index));
assertEquals(type, meta.getColumnTypeName(index));
} }
} }

View File

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Tests for error messages.
*/
public class ErrorsIT extends JdbcIntegrationTestCase {
public void testSelectFromMissingTable() throws Exception {
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * from test.doc").executeQuery());
assertEquals("line 1:15: Cannot resolve index test", e.getMessage());
}
}
public void testSelectFromMissingType() throws Exception {
index("test", builder -> builder.field("name", "bob"));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * from test.notdoc").executeQuery());
assertEquals("line 1:15: Cannot resolve type notdoc in index test", e.getMessage());
}
}
}

View File

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.Locale;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public class ShowTablesIT extends JdbcIntegrationTestCase {
public void testShowTablesWithoutAnyIndexes() throws Exception {
try (Connection h2 = LocalH2.anonymousDb();
Connection es = esJdbc()) {
h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_show_tables.sql'");
ResultSet expected = h2.createStatement().executeQuery("SELECT * FROM mock");
assertResultSets(expected, es.createStatement().executeQuery("SHOW TABLES"));
}
}
public void testShowTablesWithManyIndices() throws Exception {
try (Connection h2 = LocalH2.anonymousDb();
Connection es = esJdbc()) {
h2.createStatement().executeUpdate("RUNSCRIPT FROM 'classpath:/setup_mock_show_tables.sql'");
int indices = between(2, 20);
for (int i = 0; i < indices; i++) {
String index = String.format(Locale.ROOT, "test%02d", i);
index(index, builder -> builder.field("name", "bob"));
h2.createStatement().executeUpdate("INSERT INTO mock VALUES ('" + index + "', 'doc');");
}
ResultSet expected = h2.createStatement().executeQuery("SELECT * FROM mock ORDER BY index, type");
assertResultSets(expected, es.createStatement().executeQuery("SHOW TABLES"));
}
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.sql.jdbc.h2; package org.elasticsearch.xpack.sql.jdbc;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
@ -11,20 +11,21 @@ import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase; import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.util.CollectionUtils; import org.elasticsearch.xpack.sql.util.CollectionUtils;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test;
import java.nio.file.Path; import java.nio.file.Path;
import java.sql.Connection; import java.sql.Connection;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import static java.lang.String.format; import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets; import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public class SqlSpecIntegrationTest extends SpecBaseIntegrationTestCase { /**
* Tests comparing sql queries executed against our jdbc client
* with those executed against H2's jdbc client.
*/
public class SqlSpecIT extends SpecBaseIntegrationTestCase {
private String query; private String query;
@ClassRule @ClassRule
@ -48,20 +49,14 @@ public class SqlSpecIntegrationTest extends SpecBaseIntegrationTestCase {
} }
} }
public Connection h2Con() throws SQLException { public SqlSpecIT(String groupName, String testName, Integer lineNumber, Path source, String query) {
return H2.get();
}
public SqlSpecIntegrationTest(String groupName, String testName, Integer lineNumber, Path source, String query) {
super(groupName, testName, lineNumber, source); super(groupName, testName, lineNumber, source);
this.query = query; this.query = query;
} }
@Test public void test() throws Throwable {
public void testQuery() throws Throwable { try (Connection h2 = H2.get();
// H2 resultset Connection es = esJdbc()) {
try (Connection h2 = h2Con();
Connection es = esCon()) {
ResultSet expected, actual; ResultSet expected, actual;
try { try {
expected = h2.createStatement().executeQuery(query); expected = h2.createStatement().executeQuery(query);

View File

@ -1,154 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.csv;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.framework.CsvSpecTableReader;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import org.junit.AfterClass;
import org.junit.Test;
import org.relique.jdbc.csv.CsvDriver;
import java.io.Reader;
import java.io.StringReader;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public class CsvSpecIntegrationTest extends SpecBaseIntegrationTestCase {
private static CsvDriver DRIVER = new CsvDriver();
public static final Map<Connection, Reader> CSV_READERS = new LinkedHashMap<>();
private final CsvFragment fragment;
@AfterClass
public static void cleanup() throws Exception {
CSV_READERS.clear();
}
public static CheckedSupplier<Connection, SQLException> csvCon(Properties props, Reader reader) {
return new CheckedSupplier<Connection, SQLException>() {
@Override
public Connection get() throws SQLException {
Connection con = DRIVER.connect("jdbc:relique:csv:class:" + CsvSpecTableReader.class.getName(), props);
CSV_READERS.put(con, reader);
return con;
}
};
}
@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTNG)
public static List<Object[]> readScriptSpec() throws Exception {
CsvSpecParser parser = new CsvSpecParser();
return CollectionUtils.combine(
readScriptSpec("/command.csv-spec", parser),
readScriptSpec("/fulltext.csv-spec", parser));
}
public CsvSpecIntegrationTest(String groupName, String testName, Integer lineNumber, Path source, CsvFragment fragment) {
super(groupName, testName, lineNumber, source);
this.fragment = fragment;
}
@Test
public void testQuery() throws Throwable {
// hook CSV reader, which picks the current test context
try (Connection csv = csvCon(fragment.asProps(), fragment.reader).get();
Connection es = esCon()) {
ResultSet expected, actual;
try {
// pass the testName as table for debugging purposes (in case the underlying reader is missing)
expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY).executeQuery("SELECT * FROM " + testName);
// trigger data loading for type inference
expected.beforeFirst();
actual = es.createStatement().executeQuery(fragment.query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw reworkException(th);
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, fragment.query, th.getMessage());
}
private static class CsvSpecParser implements Parser {
private final StringBuilder data = new StringBuilder();
private CsvFragment fragment;
@Override
public Object parse(String line) {
// beginning of the section
if (fragment == null) {
// pick up the query
fragment = new CsvFragment();
fragment.query = line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
else {
// read CSV header
// if (fragment.columnNames == null) {
// fragment.columnNames = line;
// }
// read data
if (line.startsWith(";")) {
CsvFragment f = fragment;
f.reader = new StringReader(data.toString());
// clean-up
fragment = null;
data.setLength(0);
return f;
}
else {
data.append(line);
data.append("\r\n");
}
}
return null;
}
}
private static class CsvFragment {
String query;
String columnNames;
List<String> columnTypes;
Reader reader;
private static final Properties DEFAULT = new Properties();
static {
DEFAULT.setProperty("charset", "UTF-8");
// trigger auto-detection
DEFAULT.setProperty("columnTypes", "");
DEFAULT.setProperty("separator", "|");
DEFAULT.setProperty("trimValues", "true");
}
Properties asProps() {
// p.setProperty("suppressHeaders", "true");
// p.setProperty("headerline", columnNames);
return DEFAULT;
}
}
}

View File

@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.common.CheckedSupplier;
import org.junit.rules.ExternalResource;
import java.sql.Connection;
import java.sql.SQLException;
public abstract class AbstractJdbcConnectionSource extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
}

View File

@ -1,32 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.xpack.sql.jdbc.csv.CsvSpecIntegrationTest;
import org.relique.io.TableReader;
import java.io.Reader;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
public class CsvSpecTableReader implements TableReader {
@Override
public Reader getReader(Statement statement, String tableName) throws SQLException {
Reader reader = CsvSpecIntegrationTest.CSV_READERS.remove(statement.getConnection());
if (reader == null) {
throw new RuntimeException("Cannot find reader for test " + tableName);
}
return reader;
}
@Override
public List<String> getTableNames(Connection connection) throws SQLException {
throw new UnsupportedOperationException();
}
}

View File

@ -0,0 +1,79 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import java.net.InetAddress;
import java.security.AccessControlException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import static org.junit.Assert.assertNotNull;
/**
* Embedded JDBC server that uses the transport client to power
* the jdbc endpoints in the same JVM as the tests.
*/
public class EmbeddedJdbcServer extends AbstractJdbcConnectionSource {
private Client client;
private JdbcHttpServer server;
private String jdbcUrl;
private final Properties properties;
public EmbeddedJdbcServer() {
this(false);
}
public EmbeddedJdbcServer(boolean debug) {
properties = new Properties();
if (debug) {
properties.setProperty("debug", "true");
}
}
@Override
@SuppressWarnings("resource")
protected void before() throws Throwable {
try {
Settings settings = Settings.builder()
.put("client.transport.ignore_cluster_name", true)
.build();
client = new PreBuiltTransportClient(settings)
.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), 9300));
} catch (ExceptionInInitializerError e) {
if (e.getCause() instanceof AccessControlException) {
throw new RuntimeException(getClass().getSimpleName() + " is not available with the security manager", e);
} else {
throw e;
}
}
server = new JdbcHttpServer(client);
server.start(0);
jdbcUrl = server.url();
}
@Override
protected void after() {
client.close();
client = null;
server.stop();
server = null;
}
@Override
public Connection get() throws SQLException {
assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", server);
return DriverManager.getConnection(jdbcUrl, properties);
}
}

View File

@ -1,70 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver;
import org.junit.rules.ExternalResource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import static org.junit.Assert.assertNotNull;
public class EsJdbcServer extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
private Client client;
private JdbcHttpServer server;
private String jdbcUrl;
private JdbcDriver driver;
private final Properties properties;
public EsJdbcServer() {
this(false);
}
public EsJdbcServer(boolean debug) {
properties = new Properties();
if (debug) {
properties.setProperty("debug", "true");
}
}
@Override
protected void before() throws Throwable {
client = TestUtils.client();
server = new JdbcHttpServer(client);
driver = new JdbcDriver();
server.start(0);
jdbcUrl = server.url();
System.out.println("Started JDBC Server at " + jdbcUrl);
}
@Override
protected void after() {
client.close();
client = null;
server.stop();
server = null;
System.out.println("Stopped JDBC Server at " + jdbcUrl);
}
public Client client() {
assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", driver);
return server.client();
}
@Override
public Connection get() throws SQLException {
assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", driver);
return driver.connect(jdbcUrl, properties);
}
}

View File

@ -9,12 +9,17 @@ import java.sql.ResultSet;
import java.sql.ResultSetMetaData; import java.sql.ResultSetMetaData;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Types; import java.sql.Types;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;
import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.nameOf; import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.nameOf;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
public class JdbcAssert { public class JdbcAssert {
private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException { public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException {
assertResultSetMetadata(expected, actual); assertResultSetMetadata(expected, actual);
assertResultSetData(expected, actual); assertResultSetData(expected, actual);
@ -86,6 +91,6 @@ public class JdbcAssert {
} }
private static Object getTime(ResultSet rs, int column) throws SQLException { private static Object getTime(ResultSet rs, int column) throws SQLException {
return rs.getTime(column, TestUtils.UTC_CALENDAR).getTime(); return rs.getTime(column, UTC_CALENDAR).getTime();
} }
} }

View File

@ -9,6 +9,9 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Response; import org.elasticsearch.xpack.sql.jdbc.net.protocol.Response;
import org.elasticsearch.xpack.sql.test.server.ProtoHttpServer; import org.elasticsearch.xpack.sql.test.server.ProtoHttpServer;
/**
* Internal server used for testing without starting a new Elasticsearch instance.
*/
public class JdbcHttpServer extends ProtoHttpServer<Response> { public class JdbcHttpServer extends ProtoHttpServer<Response> {
public JdbcHttpServer(Client client) { public JdbcHttpServer(Client client) {

View File

@ -0,0 +1,179 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.sql.jdbc.SqlSpecIT;
import org.junit.ClassRule;
import org.relique.io.TableReader;
import org.relique.jdbc.csv.CsvConnection;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
/**
* Should the HTTP server that serves SQL be embedded in the test
* process (true) or should the JDBC driver connect to Elasticsearch
* running at {@code tests.rest.cluster}. Note that to use embedded
* HTTP you have to have Elasticsearch's transport protocol open on
* port 9300 but the Elasticsearch running there does not need to have
* the SQL plugin installed. Note also that embedded HTTP is faster
* but is not canonical because it runs against a different HTTP server
* then JDBC will use in production. Gradle always uses non-embedded.
*/
private static final boolean EMBED_SQL = Booleans.parseBoolean(System.getProperty("tests.embed.sql", "false"));
/**
* Properties used when settings up a CSV-based jdbc connection.
*/
private static final Properties CSV_PROPERTIES = new Properties();
static {
CSV_PROPERTIES.setProperty("charset", "UTF-8");
// trigger auto-detection
CSV_PROPERTIES.setProperty("columnTypes", "");
CSV_PROPERTIES.setProperty("separator", "|");
CSV_PROPERTIES.setProperty("trimValues", "true");
}
@ClassRule
public static final AbstractJdbcConnectionSource ES = EMBED_SQL ? new EmbeddedJdbcServer() : new AbstractJdbcConnectionSource() {
@Override
public Connection get() throws SQLException {
return DriverManager.getConnection("jdbc:es://" + System.getProperty("tests.rest.cluster"));
}
};
public Connection esJdbc() throws SQLException {
return ES.get();
}
public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
body.accept(builder);
builder.endObject();
HttpEntity doc = new StringEntity(builder.string(), ContentType.APPLICATION_JSON);
client().performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc);
}
public void assertMatchesCsv(String query, String csvTableName, String expectedResults) throws SQLException {
Reader reader = new StringReader(expectedResults);
TableReader tableReader = new TableReader() {
@Override
public Reader getReader(Statement statement, String tableName) throws SQLException {
return reader;
}
@Override
public List<String> getTableNames(Connection connection) throws SQLException {
throw new UnsupportedOperationException();
}
};
try (Connection csv = new CsvConnection(tableReader, CSV_PROPERTIES, "") {};
Connection es = esJdbc()) {
// pass the testName as table for debugging purposes (in case the underlying reader is missing)
ResultSet expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)
.executeQuery("SELECT * FROM " + csvTableName);
// trigger data loading for type inference
expected.beforeFirst();
ResultSet actual = es.createStatement().executeQuery(query);
assertResultSets(expected, actual);
}
}
protected String clusterName() {
try {
String response = EntityUtils.toString(client().performRequest("GET", "/").getEntity());
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false).get("cluster_name").toString();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
protected static void loadDatasetIntoEs() throws Exception {
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
createIndex.startObject("settings"); {
createIndex.field("number_of_shards", 1);
}
createIndex.endObject();
createIndex.startObject("mappings"); {
createIndex.startObject("emp");
{
createIndex.startObject("properties"); {
createIndex.startObject("emp_no").field("type", "integer").endObject();
createIndex.startObject("birth_date").field("type", "date").endObject();
createIndex.startObject("first_name").field("type", "text").endObject();
createIndex.startObject("last_name").field("type", "text").endObject();
createIndex.startObject("gender").field("type", "keyword").endObject();
createIndex.startObject("hire_date").field("type", "date").endObject();
}
createIndex.endObject();
}
createIndex.endObject();
}
createIndex.endObject().endObject();
client().performRequest("PUT", "/test_emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
StringBuilder bulk = new StringBuilder();
csvToLines("employees", (titles, fields) -> {
bulk.append("{\"index\":{}}\n");
bulk.append('{');
for (int f = 0; f < fields.size(); f++) {
if (f != 0) {
bulk.append(',');
}
bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
}
bulk.append("}\n");
});
client().performRequest("POST", "/test_emp/emp/_bulk", singletonMap("refresh", "true"), new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
}
private static void csvToLines(String name, CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception {
String location = "/" + name + ".csv";
URL dataSet = SqlSpecIT.class.getResource(location);
if (dataSet == null) {
throw new IllegalArgumentException("Can't find [" + location + "]");
}
List<String> lines = Files.readAllLines(PathUtils.get(dataSet.toURI()));
if (lines.isEmpty()) {
throw new IllegalArgumentException("[" + location + "] must contain at least a title row");
}
List<String> titles = Arrays.asList(lines.get(0).split(","));
for (int l = 1; l < lines.size(); l++) {
consumeLine.accept(titles, Arrays.asList(lines.get(l).split(",")));
}
}
}

View File

@ -1,179 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.CheckedSupplier;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
// poor's man JdbcTemplate
public class JdbcTemplate {
private final CheckedSupplier<Connection, SQLException> conn;
public JdbcTemplate(CheckedSupplier<Connection, SQLException> conn) {
this.conn = conn;
}
public void consume(CheckedConsumer<Connection, SQLException> c) throws SQLException {
try (Connection con = conn.get()) {
c.accept(con);
}
}
public <T> T map(CheckedFunction<Connection, T, SQLException> c) throws SQLException {
try (Connection con = conn.get()) {
return c.apply(con);
}
}
public <T> T query(String q, CheckedFunction<ResultSet, T, SQLException> f) throws SQLException {
return map(c -> {
try (Statement st = c.createStatement();
ResultSet rset = st.executeQuery(q)) {
return f.apply(rset);
}
});
}
public <T> T queryObject(String q, Class<T> type) throws SQLException {
return query(q, singleResult(type));
}
public void execute(String query) throws Exception {
map(c -> {
try (Statement st = c.createStatement()) {
st.execute(query);
return null;
}
});
}
public <T> T execute(String query, CheckedFunction<PreparedStatement, T, SQLException> callback) throws SQLException {
return map(c -> {
try (PreparedStatement ps = c.prepareStatement(query)) {
return callback.apply(ps);
}
});
}
public <T> T execute(String query, CheckedConsumer<PreparedStatement, SQLException> prepare,
CheckedFunction<ResultSet, T, SQLException> mapper) throws SQLException {
return execute(query, ps -> {
prepare.accept(ps);
try (ResultSet rs = ps.executeQuery()) {
return mapper.apply(rs);
}
});
}
public <T> T query(String q, CheckedFunction<ResultSet, T, SQLException> mapper, Object... args) throws SQLException {
CheckedConsumer<PreparedStatement, SQLException> p = ps -> {
if (args != null) {
for (int i = 0; i < args.length; i++) {
ps.setObject(i + 1, args[i]);
}
}
};
return execute(q, p, mapper);
}
public <T> T queryObject(String q, Class<T> type, Object...args) throws Exception {
return query(q, singleResult(type), args);
}
public <T> List<T> queryForList(String q, CheckedBiFunction<ResultSet, Integer, T, SQLException> mapper, Object... args)
throws Exception {
CheckedFunction<ResultSet, List<T>, SQLException> f = rs -> {
List<T> list = new ArrayList<>();
while (rs.next()) {
list.add(mapper.apply(rs, rs.getRow()));
}
return list;
};
return query(q, f, args);
}
public <T> List<T> queryForList(String q, Class<T> type, Object... args) throws Exception {
CheckedBiFunction<ResultSet, Integer, T, SQLException> mapper = (rs, i) -> {
if (i != 1) {
throw new IllegalArgumentException("Expected exactly one column...");
}
return convertObject(rs.getObject(i), type);
};
return queryForList(q, mapper, args);
}
public static <T> CheckedFunction<ResultSet, T, SQLException> singleResult(Class<T> type) {
return rs -> {
if (rs.next()) {
T result = convertObject(rs.getObject(1), type);
if (!rs.next()) {
return result;
}
}
throw new IllegalArgumentException("Expected exactly one column; discovered [" + rs.getMetaData().getColumnCount() + "]");
};
}
@SuppressWarnings("unchecked")
private static <T> T convertObject(Object val, Class<T> type) {
Object conv = null;
if (val == null) {
return null;
}
if (String.class == type) {
conv = val.toString();
}
else if (Number.class.isAssignableFrom(type)) {
Number n = (Number) val;
if (Integer.class == type) {
conv = Integer.valueOf(n.intValue());
}
else if (Long.class == type) {
conv = Long.valueOf(n.longValue());
}
else {
throw new IllegalStateException("Unknown type");
}
}
return (T) conv;
}
public List<Map<String, Object>> queryForList(String q, Object... args) throws Exception {
return queryForList(q, (rs, i) -> {
ResultSetMetaData metaData = rs.getMetaData();
int count = metaData.getColumnCount();
Map<String, Object> map = new LinkedHashMap<>(count);
for (int j = 1; j <= count; j++) {
map.put(metaData.getColumnName(j), rs.getObject(j));
}
return map;
}, args);
}
@FunctionalInterface
public interface CheckedBiFunction<T, U, R, E extends Exception> {
R apply(T t, U u) throws E;
}
}

View File

@ -6,21 +6,36 @@
package org.elasticsearch.xpack.sql.jdbc.framework; package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.CheckedSupplier;
import org.h2.Driver;
import org.junit.rules.ExternalResource; import org.junit.rules.ExternalResource;
import java.sql.Connection; import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Properties; import java.util.Properties;
import java.util.TimeZone; import java.util.TimeZone;
public class LocalH2 extends ExternalResource implements CheckedSupplier<Connection, SQLException> { public class LocalH2 extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
static {
try {
// Initialize h2 so we can use it for testing
Class.forName("org.h2.Driver");
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
/**
* Creates an in memory anonymous database and returns the only connection to it.
* Closing the connection will remove the db.
*/
public static Connection anonymousDb() throws SQLException {
return DriverManager.getConnection("jdbc:h2:mem:;DATABASE_TO_UPPER=false;ALIAS_COLUMN_NAME=true");
}
private static final Properties DEFAULTS = new Properties();
private final Driver driver = Driver.load();
// add any defaults in here
private final Properties DEFAULTS = new Properties();
private final String url; private final String url;
// H2 in-memory will keep the db alive as long as the connection is opened // H2 in-memory will keep the db alive as long as this connection is opened
private Connection keepAlive; private Connection keepAlive;
/* /*
@ -43,8 +58,8 @@ public class LocalH2 extends ExternalResource implements CheckedSupplier<Connect
keepAlive = get(); keepAlive = get();
TimeZone tz = TimeZone.getDefault(); TimeZone tz = TimeZone.getDefault();
try { try {
TimeZone.setDefault(TimeZone.getTimeZone("UTC")); TimeZone.setDefault(TimeZone.getTimeZone("UTC")); // NOCOMMIT requires permissions we'd rather not grant
keepAlive.createStatement().execute("RUNSCRIPT FROM 'classpath:/h2-setup.sql'"); keepAlive.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'");
} finally { } finally {
TimeZone.setDefault(tz); TimeZone.setDefault(tz);
} }
@ -61,6 +76,6 @@ public class LocalH2 extends ExternalResource implements CheckedSupplier<Connect
@Override @Override
public Connection get() throws SQLException { public Connection get() throws SQLException {
return driver.connect(url, DEFAULTS); return DriverManager.getConnection(url, DEFAULTS);
} }
} }

View File

@ -5,15 +5,16 @@
*/ */
package org.elasticsearch.xpack.sql.jdbc.framework; package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.common.io.PathUtils;
import org.junit.Assert; import org.junit.AfterClass;
import org.junit.ClassRule; import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
@ -22,26 +23,51 @@ import java.util.Map;
import static java.lang.String.format; import static java.lang.String.format;
public abstract class SpecBaseIntegrationTestCase extends ESTestCase { /**
* Tests that compare the Elasticsearch JDBC client to some other JDBC client
* after loading a specific set of test data.
*/
public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
protected static final String PARAM_FORMATTNG = "%0$s.test%2$s"; protected static final String PARAM_FORMATTNG = "%0$s.test%2$s";
private static boolean haveSetupTestData;
protected final String groupName; protected final String groupName;
protected final String testName; protected final String testName;
protected final Integer lineNumber; protected final Integer lineNumber;
protected final Path source; protected final Path source;
@ClassRule @BeforeClass
public static EsJdbcServer ES = new EsJdbcServer(); public static void clearSetupTestData() {
haveSetupTestData = false;
}
// @Before
// This typically is uncommented when starting up a new instance of ES public void setupTestData() throws Exception {
// if (haveSetupTestData) {
// @BeforeClass // We only need to load the test data once
// public static void start() throws Exception { return;
// TestUtils.loadDatasetInEs(TestUtils.restClient("localhost", 9200)); }
// System.out.println("Loaded dataset in ES"); loadDatasetIntoEs();
// } haveSetupTestData = true;
}
@AfterClass
public static void cleanupTestData() throws IOException {
try {
client().performRequest("DELETE", "/*");
} catch (ResponseException e) {
if (e.getResponse().getStatusLine().getStatusCode() != 404) {
// 404 means no indices which shouldn't cause a failure
throw e;
}
}
}
@Override
protected boolean preserveIndicesUponCompletion() {
return true;
}
public SpecBaseIntegrationTestCase(String groupName, String testName, Integer lineNumber, Path source) { public SpecBaseIntegrationTestCase(String groupName, String testName, Integer lineNumber, Path source) {
this.groupName = groupName; this.groupName = groupName;
@ -50,10 +76,6 @@ public abstract class SpecBaseIntegrationTestCase extends ESTestCase {
this.source = source; this.source = source;
} }
public Connection esCon() throws Exception {
return ES.get();
}
protected Throwable reworkException(Throwable th) { protected Throwable reworkException(Throwable th) {
StackTraceElement[] stackTrace = th.getStackTrace(); StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1]; StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
@ -63,14 +85,14 @@ public abstract class SpecBaseIntegrationTestCase extends ESTestCase {
th.setStackTrace(redone); th.setStackTrace(redone);
return th; return th;
} }
// //
// spec reader // spec reader
// //
// returns groupName, testName, its line location, its source and the custom object (based on each test parser) // returns groupName, testName, its line location, its source and the custom object (based on each test parser)
protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception { protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception {
Path source = Paths.get(TestUtils.class.getResource(url).toURI()); Path source = PathUtils.get(SpecBaseIntegrationTestCase.class.getResource(url).toURI());
String fileName = source.getFileName().toString(); String fileName = source.getFileName().toString();
int dot = fileName.indexOf("."); int dot = fileName.indexOf(".");
String groupName = dot > 0 ? fileName.substring(0, dot) : fileName; String groupName = dot > 0 ? fileName.substring(0, dot) : fileName;
@ -105,7 +127,7 @@ public abstract class SpecBaseIntegrationTestCase extends ESTestCase {
} }
} }
} }
Assert.assertNull("Cannot find spec for test " + testName, testName); assertNull("Cannot find spec for test " + testName, testName);
return pairs; return pairs;
} }

View File

@ -1,132 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.elasticsearch.xpack.sql.jdbc.h2.SqlSpecIntegrationTest;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
/**
 * Static helpers shared by the SQL JDBC integration tests: building REST and
 * transport clients and loading the employee test dataset into Elasticsearch.
 */
public abstract class TestUtils {
// Shared UTC calendar used by JDBC date/time result comparisons.
static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
/** Low-level REST client pointed at the given host and port. */
public static RestClient restClient(String host, int port) {
return RestClient.builder(new HttpHost(host, port)).build();
}
/** Low-level REST client pointed at the given address (default HTTP port semantics of HttpHost). */
public static RestClient restClient(InetAddress address) {
return RestClient.builder(new HttpHost(address)).build();
}
/** Transport client connected to the loopback address on port 9300. */
public static Client client() {
return new PreBuiltTransportClient(Settings.EMPTY)
.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), 9300));
}
/**
 * Indexes a single document with id 1 into {@code index}, letting {@code body}
 * fill in the document fields, and refreshes so it is immediately searchable.
 */
public static void index(RestClient client, String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
body.accept(builder);
builder.endObject();
HttpEntity doc = new StringEntity(builder.string(), ContentType.APPLICATION_JSON);
client.performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc);
}
/**
 * Creates the single-shard {@code test_emp} index with an {@code emp} mapping
 * and bulk-loads every row of {@code employees.csv} into it, refreshing at the
 * end so the data is visible to the first test that runs.
 */
public static void loadDatasetInEs(RestClient client) throws Exception {
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
createIndex.startObject("settings"); {
createIndex.field("number_of_shards", 1);
}
createIndex.endObject();
createIndex.startObject("mappings"); {
createIndex.startObject("emp");
{
createIndex.startObject("properties"); {
createIndex.startObject("emp_no").field("type", "integer").endObject();
createIndex.startObject("birth_date").field("type", "date").endObject();
createIndex.startObject("first_name").field("type", "text").endObject();
createIndex.startObject("last_name").field("type", "text").endObject();
createIndex.startObject("gender").field("type", "keyword").endObject();
createIndex.startObject("hire_date").field("type", "date").endObject();
}
createIndex.endObject();
}
createIndex.endObject();
}
createIndex.endObject().endObject();
client.performRequest("PUT", "/test_emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
// Build one _bulk body with an index action per CSV row. Every field is sent
// as a JSON string; the mapping above coerces them to the right types.
StringBuilder bulk = new StringBuilder();
csvToLines("employees", (titles, fields) -> {
bulk.append("{\"index\":{}}\n");
bulk.append('{');
for (int f = 0; f < fields.size(); f++) {
if (f != 0) {
bulk.append(',');
}
bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
}
bulk.append("}\n");
});
client.performRequest("POST", "/test_emp/emp/_bulk", singletonMap("refresh", "true"), new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
}
/**
 * Reads {@code /<name>.csv} from the test classpath, treating the first line
 * as column titles and feeding each subsequent line (titles, fields) to
 * {@code consumeLine}.
 * NOTE(review): naive {@code split(",")} — assumes no quoted or escaped
 * commas inside field values; confirm the CSV fixtures honor that.
 *
 * @throws IllegalArgumentException if the resource is missing or empty
 */
private static void csvToLines(String name, CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception {
String location = "/" + name + ".csv";
URL dataSet = SqlSpecIntegrationTest.class.getResource(location);
if (dataSet == null) {
throw new IllegalArgumentException("Can't find [" + location + "]");
}
List<String> lines = Files.readAllLines(PathUtils.get(dataSet.toURI()));
if (lines.isEmpty()) {
throw new IllegalArgumentException("[" + location + "] must contain at least a title row");
}
List<String> titles = Arrays.asList(lines.get(0).split(","));
for (int l = 1; l < lines.size(); l++) {
consumeLine.accept(titles, Arrays.asList(lines.get(l).split(",")));
}
}
/**
 * Prepends a synthetic stack frame naming the test suite, test name and the
 * spec file line so a failure points back at the originating spec entry.
 */
Throwable reworkException(Throwable th, Class<?> testSuite, String testName, Path source, int lineNumber) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
redone[0] = new StackTraceElement(testSuite.getName(), testName, source.getFileName().toString(), lineNumber);
th.setStackTrace(redone);
return th;
}
}

View File

@ -1,10 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
* Tests comparing our jdbc client to H2's jdbc client.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;

View File

@ -6,11 +6,11 @@
showFunctions showFunctions
SHOW FUNCTIONS; SHOW FUNCTIONS;
name | type name | type
AVG |AGGREGATE AVG |AGGREGATE
COUNT |AGGREGATE COUNT |AGGREGATE
MAX |AGGREGATE MAX |AGGREGATE
MIN |AGGREGATE MIN |AGGREGATE
SUM |AGGREGATE SUM |AGGREGATE
MEAN |AGGREGATE MEAN |AGGREGATE
STDDEV_POP |AGGREGATE STDDEV_POP |AGGREGATE
@ -18,96 +18,103 @@ VAR_POP |AGGREGATE
SUM_OF_SQUARES |AGGREGATE SUM_OF_SQUARES |AGGREGATE
SKEWNESS |AGGREGATE SKEWNESS |AGGREGATE
KURTOSIS |AGGREGATE KURTOSIS |AGGREGATE
DAY_OF_MONTH |SCALAR DAY_OF_MONTH |SCALAR
DAY |SCALAR DAY |SCALAR
DOM |SCALAR DOM |SCALAR
DAY_OF_WEEK |SCALAR DAY_OF_WEEK |SCALAR
DOW |SCALAR DOW |SCALAR
DAY_OF_YEAR |SCALAR DAY_OF_YEAR |SCALAR
DOY |SCALAR DOY |SCALAR
HOUR_OF_DAY |SCALAR HOUR_OF_DAY |SCALAR
HOUR |SCALAR HOUR |SCALAR
MINUTE_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR
MINUTE_OF_HOUR |SCALAR MINUTE_OF_HOUR |SCALAR
MINUTE |SCALAR MINUTE |SCALAR
SECOND_OF_MINUTE|SCALAR SECOND_OF_MINUTE|SCALAR
SECOND |SCALAR SECOND |SCALAR
MONTH_OF_YEAR |SCALAR MONTH_OF_YEAR |SCALAR
MONTH |SCALAR MONTH |SCALAR
YEAR |SCALAR YEAR |SCALAR
ABS |SCALAR ABS |SCALAR
ACOS |SCALAR ACOS |SCALAR
ASIN |SCALAR ASIN |SCALAR
ATAN |SCALAR ATAN |SCALAR
CBRT |SCALAR CBRT |SCALAR
CEIL |SCALAR CEIL |SCALAR
COS |SCALAR COS |SCALAR
COSH |SCALAR COSH |SCALAR
DEGREES |SCALAR DEGREES |SCALAR
E |SCALAR E |SCALAR
EXP |SCALAR EXP |SCALAR
EXPM1 |SCALAR EXPM1 |SCALAR
FLOOR |SCALAR FLOOR |SCALAR
LOG |SCALAR LOG |SCALAR
LOG10 |SCALAR LOG10 |SCALAR
PI |SCALAR PI |SCALAR
RADIANS |SCALAR RADIANS |SCALAR
ROUND |SCALAR ROUND |SCALAR
SIN |SCALAR SIN |SCALAR
SINH |SCALAR SINH |SCALAR
SQRT |SCALAR SQRT |SCALAR
TAN |SCALAR TAN |SCALAR
; ;
showFunctionsWithExactMatch showFunctionsWithExactMatch
SHOW FUNCTIONS LIKE 'ABS'; SHOW FUNCTIONS LIKE 'ABS';
name | type name | type
ABS |SCALAR ABS |SCALAR
; ;
showFunctionsWithPatternWildcard showFunctionsWithPatternWildcard
SHOW FUNCTIONS LIKE 'A%'; SHOW FUNCTIONS LIKE 'A%';
name | type name | type
AVG |AGGREGATE AVG |AGGREGATE
ABS |SCALAR ABS |SCALAR
ACOS |SCALAR ACOS |SCALAR
ASIN |SCALAR ASIN |SCALAR
ATAN |SCALAR ATAN |SCALAR
; ;
showFunctionsWithPatternChar showFunctionsWithPatternChar
SHOW FUNCTIONS LIKE 'A__'; SHOW FUNCTIONS LIKE 'A__';
name | type name | type
AVG |AGGREGATE AVG |AGGREGATE
ABS |SCALAR ABS |SCALAR
; ;
showFunctions showFunctions
SHOW FUNCTIONS '%DAY%'; SHOW FUNCTIONS '%DAY%';
name | type name | type
DAY_OF_MONTH |SCALAR DAY_OF_MONTH |SCALAR
DAY |SCALAR DAY |SCALAR
DAY_OF_WEEK |SCALAR DAY_OF_WEEK |SCALAR
DAY_OF_YEAR |SCALAR DAY_OF_YEAR |SCALAR
HOUR_OF_DAY |SCALAR HOUR_OF_DAY |SCALAR
MINUTE_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR
;
showTables
SHOW TABLES;
index | type
test_emp |emp
; ;
// DESCRIBE // DESCRIBE
describe describe
DESCRIBE "test_emp.emp"; DESCRIBE "test_emp.emp";
column | type column | type
birth_date |TIMESTAMP birth_date |TIMESTAMP
emp_no |INTEGER emp_no |INTEGER
first_name |VARCHAR first_name |VARCHAR
gender |VARCHAR gender |VARCHAR
hire_date |TIMESTAMP hire_date |TIMESTAMP
last_name |VARCHAR last_name |VARCHAR
; ;

View File

@ -1,4 +1,4 @@
grant { grant {
// Policy is required for tests to connect to testing Elasticsearch instance. // Required for tests to connect to testing Elasticsearch instance.
permission java.net.SocketPermission "*", "connect,resolve"; permission java.net.SocketPermission "*", "connect,resolve";
}; };

View File

@ -0,0 +1,29 @@
-- H2 mock table whose columns match the result set of
-- java.sql.DatabaseMetaData.getColumns(), seeded with one row for the
-- 'name' column of 'test.doc'. Presumably loaded by the metadata comparison
-- tests to diff our driver's getColumns() output against a known shape.
CREATE TABLE mock (
    TABLE_SCHEM VARCHAR,
    TABLE_NAME VARCHAR,
    COLUMN_NAME VARCHAR,
    DATA_TYPE INTEGER,
    TYPE_NAME VARCHAR,
    COLUMN_SIZE INTEGER,
    BUFFER_LENGTH NULL,
    DECIMAL_DIGITS INTEGER,
    NUM_PREC_RADIX INTEGER,
    NULLABLE INTEGER,
    REMARKS VARCHAR,
    COLUMN_DEF VARCHAR,
    SQL_DATA_TYPE INTEGER,
    SQL_DATETIME_SUB INTEGER,
    CHAR_OCTET_LENGTH INTEGER,
    ORDINAL_POSITION INTEGER,
    IS_NULLABLE VARCHAR,
    SCOPE_CATALOG VARCHAR,
    SCOPE_SCHEMA VARCHAR,
    SCOPE_TABLE VARCHAR,
    SOURCE_DATA_TYPE SMALLINT,
    IS_AUTOINCREMENT VARCHAR,
    IS_GENERATEDCOLUMN VARCHAR
-- DATA_TYPE 12 is java.sql.Types.VARCHAR.
) AS SELECT '', 'test.doc', 'name', 12, 'VARCHAR', 1, null, null,
    10, -- NOCOMMIT 10 seem wrong to hard code for stuff like strings
    2, -- columnNullableUnknown NOCOMMIT I think it'd be more correct to return columnNullable
    null, null, null, null, null, 1, '', null, null, null, null, '', ''
FROM DUAL;

View File

@ -0,0 +1,22 @@
CREATE TABLE mock (
PROCEDURE_CAT VARCHAR,
PROCEDURE_SCHEM VARCHAR,
PROCEDURE_NAME VARCHAR,
COLUMN_NAME VARCHAR,
COLUMN_TYPE SMALLINT,
DATA_TYPE INTEGER,
TYPE_NAME VARCHAR,
PRECISION INTEGER,
LENGTH INTEGER,
SCALE SMALLINT,
RADIX SMALLINT,
NULLABLE SMALLINT,
REMARKS VARCHAR,
COLUMN_DEF VARCHAR,
SQL_DATA_TYPE INTEGER,
SQL_DATETIME_SUB INTEGER,
CHAR_OCTET_LENGTH INTEGER,
ORDINAL_POSITION INTEGER,
IS_NULLABLE VARCHAR,
SPECIFIC_NAME VARCHAR
);

View File

@ -0,0 +1,11 @@
CREATE TABLE mock (
PROCEDURE_CAT VARCHAR,
PROCEDURE_SCHEM VARCHAR,
PROCEDURE_NAME VARCHAR,
NUM_INPUT_PARAMS INTEGER,
NUM_OUTPUT_PARAMS INTEGER,
NUM_RESULT_SETS INTEGER,
REMARKS VARCHAR,
PROCEDURE_TYPE SMALLINT,
SPECIFIC_NAME VARCHAR
);

View File

@ -0,0 +1,11 @@
CREATE TABLE mock (
TABLE_SCHEM VARCHAR,
TABLE_NAME VARCHAR,
TABLE_TYPE VARCHAR,
REMARKS VARCHAR,
TYPE_CAT VARCHAR,
TYPE_SCHEM VARCHAR,
TYPE_NAME VARCHAR,
SELF_REFERENCING_COL_NAME VARCHAR,
REF_GENERATION VARCHAR
) AS SELECT '', 'test.doc', 'TABLE', '', null, null, null, null, null FROM DUAL;

View File

@ -0,0 +1,4 @@
CREATE TABLE mock (
"index" VARCHAR,
"type" VARCHAR
);

View File

@ -5,7 +5,7 @@
*/ */
package org.elasticsearch.xpack.sql.analysis.catalog; package org.elasticsearch.xpack.sql.analysis.catalog;
import java.util.Collection; import java.util.List;
public interface Catalog { public interface Catalog {
@ -15,15 +15,15 @@ public interface Catalog {
boolean indexExists(String index); boolean indexExists(String index);
Collection<EsIndex> listIndices(); List<EsIndex> listIndices();
Collection<EsIndex> listIndices(String pattern); List<EsIndex> listIndices(String pattern);
EsType getType(String index, String type); EsType getType(String index, String type);
boolean typeExists(String index, String type); boolean typeExists(String index, String type);
Collection<EsType> listTypes(String index); List<EsType> listTypes(String index);
Collection<EsType> listTypes(String index, String pattern); List<EsType> listTypes(String index, String pattern);
} }

View File

@ -14,7 +14,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -54,12 +53,12 @@ public class EsCatalog implements Catalog {
} }
@Override @Override
public Collection<EsIndex> listIndices() { public List<EsIndex> listIndices() {
return listIndices(null); return listIndices(null);
} }
@Override @Override
public Collection<EsIndex> listIndices(String pattern) { public List<EsIndex> listIndices(String pattern) {
Iterator<IndexMetaData> indexMetadata = null; Iterator<IndexMetaData> indexMetadata = null;
MetaData md = metadata(); MetaData md = metadata();
if (pattern == null) { if (pattern == null) {
@ -97,12 +96,12 @@ public class EsCatalog implements Catalog {
} }
@Override @Override
public Collection<EsType> listTypes(String index) { public List<EsType> listTypes(String index) {
return listTypes(index, null); return listTypes(index, null);
} }
@Override @Override
public Collection<EsType> listTypes(String indexPattern, String pattern) { public List<EsType> listTypes(String indexPattern, String pattern) {
if (!Strings.hasText(indexPattern)) { if (!Strings.hasText(indexPattern)) {
indexPattern = WILDCARD; indexPattern = WILDCARD;
} }

View File

@ -51,7 +51,7 @@ public class EsIndex {
return settings; return settings;
} }
static Collection<EsIndex> build(Iterator<IndexMetaData> metadata) { static List<EsIndex> build(Iterator<IndexMetaData> metadata) {
if (metadata == null || !metadata.hasNext()) { if (metadata == null || !metadata.hasNext()) {
return emptyList(); return emptyList();
} }

View File

@ -5,15 +5,15 @@
*/ */
package org.elasticsearch.xpack.sql.analysis.catalog; package org.elasticsearch.xpack.sql.analysis.catalog;
import java.util.Collection; import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.ArrayList;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.elasticsearch.xpack.sql.util.StringUtils;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toList;
@ -48,12 +48,12 @@ public abstract class InMemoryCatalog implements Catalog {
} }
@Override @Override
public Collection<EsIndex> listIndices() { public List<EsIndex> listIndices() {
return indices.values(); return new ArrayList<>(indices.values());
} }
@Override @Override
public Collection<EsIndex> listIndices(String pattern) { public List<EsIndex> listIndices(String pattern) {
Pattern p = StringUtils.likeRegex(pattern); Pattern p = StringUtils.likeRegex(pattern);
return indices.entrySet().stream() return indices.entrySet().stream()
.filter(e -> p.matcher(e.getKey()).matches()) .filter(e -> p.matcher(e.getKey()).matches())
@ -78,13 +78,13 @@ public abstract class InMemoryCatalog implements Catalog {
} }
@Override @Override
public Collection<EsType> listTypes(String index) { public List<EsType> listTypes(String index) {
Map<String, EsType> typs = types.get(index); Map<String, EsType> typs = types.get(index);
return typs != null ? typs.values() : emptyList(); return typs != null ? new ArrayList<>(typs.values()) : emptyList();
} }
@Override @Override
public Collection<EsType> listTypes(String index, String pattern) { public List<EsType> listTypes(String index, String pattern) {
Map<String, EsType> typs = types.get(index); Map<String, EsType> typs = types.get(index);
if (typs == null) { if (typs == null) {
return emptyList(); return emptyList();

View File

@ -5,10 +5,6 @@
*/ */
package org.elasticsearch.xpack.sql.plan.logical.command; package org.elasticsearch.xpack.sql.plan.logical.command;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import org.elasticsearch.xpack.sql.analysis.catalog.EsType; import org.elasticsearch.xpack.sql.analysis.catalog.EsType;
import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
@ -18,7 +14,12 @@ import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.DataTypes;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toList;
public class ShowTables extends Command { public class ShowTables extends Command {
@ -47,8 +48,10 @@ public class ShowTables extends Command {
@Override @Override
protected RowSetCursor execute(SqlSession session) { protected RowSetCursor execute(SqlSession session) {
Collection<EsType> types = session.catalog().listTypes(index, pattern); List<EsType> types = session.catalog().listTypes(index, pattern);
// Consistent sorting is nice both for testing and humans
Collections.sort(types, comparing(EsType::index).thenComparing(EsType::name));
return Rows.of(output(), types.stream() return Rows.of(output(), types.stream()
.map(t -> asList(t.index(), t.name())) .map(t -> asList(t.index(), t.name()))
.collect(toList())); .collect(toList()));

View File

@ -13,8 +13,19 @@ dependencies {
* production code. */ * production code. */
dependencyLicenses.enabled = false dependencyLicenses.enabled = false
forbiddenApisMain { // Allow for com.sun.net.httpserver.* usage for testing
//we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage eclipse {
classpath.file {
whenMerged { cp ->
def con = entries.find { e ->
e.kind == "con" && e.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER")
}
con.accessRules.add(new org.gradle.plugins.ide.eclipse.model.AccessRule(
"accessible", "com/sun/net/httpserver/*"))
}
}
}
forbiddenApisTest {
bundledSignatures -= 'jdk-non-portable' bundledSignatures -= 'jdk-non-portable'
bundledSignatures += 'jdk-internal' bundledSignatures += 'jdk-internal'
} }