Add CSV infra plus some refactoring of the JDBC one

Original commit: elastic/x-pack-elasticsearch@3e002c91c2
Costin Leau 2017-07-03 21:01:29 +03:00
parent bc9c9a9151
commit 3eb1258b0d
38 changed files with 805 additions and 643 deletions

View File

@ -14,7 +14,7 @@ dependencies {
testCompile project(':x-pack-elasticsearch:sql:server')
runtime "org.fusesource.jansi:jansi:1.16"
runtime "org.elasticsearch:jna:4.4.0"
runtime "org.elasticsearch:jna:4.4.0-1"
}
dependencyLicenses {

View File

@ -27,8 +27,7 @@ class CliProtoHandler extends ProtoHandler<Response> {
CliProtoHandler(Client client) {
super(client, ProtoUtils::readHeader, CliServerProtoUtils::write);
this.server = new CliServer(TestUtils.planExecutor(client), clusterName, () -> info.getNode().getName(), info.getVersion(),
info.getBuild());
this.server = new CliServer(TestUtils.planExecutor(client), clusterName, () -> info.getNode().getName(), info.getVersion(), info.getBuild());
}
@Override

View File

@ -51,7 +51,7 @@ dependencies {
// Used by the hack to run InternalTestCluster if not running against a gradle-started cluster.
testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
testRuntime "com.h2database:org.elasticsearch.xpack.sql.jdbc.h2:1.4.194"
testRuntime "com.h2database:h2:1.4.194"
testRuntime "net.sourceforge.csvjdbc:csvjdbc:1.0.31"
}

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import java.sql.Connection;
import java.sql.SQLException;
@ -18,11 +18,12 @@ import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.sql.jdbc.framework.TestUtils.index;
/**
* Test the jdbc driver behavior and the connection to Elasticsearch.
*/
public class BasicsIT extends JdbcIntegrationTestCase {
public class BasicsIT extends SpecBaseIntegrationTestCase {
// NOCOMMIT these might need to move into their own test or be deleted entirely
// public void test01Ping() throws Exception {

View File

@ -5,7 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.startsWith;
/**
* Tests for our implementation of {@link DatabaseMetaData}.
*/
public class DatabaseMetaDataIT extends JdbcIntegrationTestCase {
public class DatabaseMetaDataIT extends SpecBaseIntegrationTestCase {
/**
* We do not support procedures so we return an empty set for {@link DatabaseMetaData#getProcedures(String, String, String)}.
*/

View File

@ -0,0 +1,155 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.csv;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.framework.CsvSpecTableReader;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase.Parser;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import org.junit.AfterClass;
import org.junit.Test;
import org.relique.jdbc.csv.CsvDriver;
import java.io.Reader;
import java.io.StringReader;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public class CsvSpecIntegrationTest extends SpecBaseIntegrationTestCase {
private static CsvDriver DRIVER = new CsvDriver();
public static final Map<Connection, Reader> CSV_READERS = new LinkedHashMap<>();
private final CsvFragment fragment;
@AfterClass
public static void cleanup() throws Exception {
CSV_READERS.clear();
}
public static CheckedSupplier<Connection, SQLException> csvCon(Properties props, Reader reader) {
return new CheckedSupplier<Connection, SQLException>() {
@Override
public Connection get() throws SQLException {
Connection con = DRIVER.connect("jdbc:relique:csv:class:" + CsvSpecTableReader.class.getName(), props);
CSV_READERS.put(con, reader);
return con;
}
};
}
@ParametersFactory(shuffle = false, argumentFormatting = "name=%1s")
public static List<Object[]> readScriptSpec() throws Exception {
CsvSpecParser parser = new CsvSpecParser();
return CollectionUtils.combine(
readScriptSpec("/command.csv-spec", parser),
readScriptSpec("/fulltext.csv-spec", parser));
}
public CsvSpecIntegrationTest(String groupName, String testName, Integer lineNumber, Path source, CsvFragment fragment) {
super(groupName, testName, lineNumber, source);
this.fragment = fragment;
}
@Test
public void testQuery() throws Throwable {
// hook the CSV reader, which picks up the current test context
try (Connection csv = csvCon(fragment.asProps(), fragment.reader).get();
Connection es = esCon()) {
ResultSet expected, actual;
try {
// pass the testName as the table name for debugging purposes (in case the underlying reader is missing)
expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY).executeQuery("SELECT * FROM " + testName);
// trigger data loading for type inference
expected.beforeFirst();
actual = es.createStatement().executeQuery(fragment.query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw new RuntimeException(errorMessage(th), th);
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, fragment.query, th.getMessage());
}
private static class CsvSpecParser implements Parser {
private final StringBuilder data = new StringBuilder();
private CsvFragment fragment;
@Override
public Object parse(String line) {
// beginning of the section
if (fragment == null) {
// pick up the query
fragment = new CsvFragment();
fragment.query = line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
else {
// read CSV header
// if (fragment.columnNames == null) {
// fragment.columnNames = line;
// }
// read data
if (line.startsWith(";")) {
CsvFragment f = fragment;
f.reader = new StringReader(data.toString());
// clean-up
fragment = null;
data.setLength(0);
return f;
}
else {
data.append(line);
data.append("\r\n");
}
}
return null;
}
}
private static class CsvFragment {
String query;
String columnNames;
List<String> columnTypes;
Reader reader;
private static final Properties DEFAULT = new Properties();
static {
DEFAULT.setProperty("charset", "UTF-8");
// trigger auto-detection
DEFAULT.setProperty("columnTypes", "");
DEFAULT.setProperty("separator", "|");
DEFAULT.setProperty("trimValues", "true");
}
Properties asProps() {
// p.setProperty("suppressHeaders", "true");
// p.setProperty("headerline", columnNames);
return DEFAULT;
}
}
}
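For reference, a csv-spec entry as consumed by readScriptSpec and the CsvSpecParser above is a test name line, a query line, a pipe-separated header, data rows, and a terminating line starting with ";". A minimal hypothetical entry (names and values are illustrative):
mySimpleQuery
SELECT first_name, gender FROM "test_emp.emp" LIMIT 2;
first_name | gender
Georgi |M
Bezalel |F
;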

View File

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
import java.io.Reader;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.function.Supplier;
import org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcTemplate.JdbcSupplier;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.relique.jdbc.csv.CsvDriver;
@RunWith(Suite.class)
public abstract class CsvInfraSuite extends EsInfra {
private static CsvDriver DRIVER = new CsvDriver();
public static final Map<Connection, Reader> CSV_READERS = new LinkedHashMap<>();
@BeforeClass
public static void setupDB() throws Exception {
EsInfra.setupDB();
}
@AfterClass
public static void cleanup() throws Exception {
CSV_READERS.clear();
}
public static Supplier<Connection> csvCon(Properties props, Reader reader) {
return new JdbcSupplier<Connection>() {
@Override
public Connection jdbc() throws SQLException {
Connection con = DRIVER.connect("jdbc:relique:csv:class:" + CsvSpecTableReader.class.getName(), props);
CSV_READERS.put(con, reader);
return con;
}
};
}
}

View File

@ -1,115 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
import java.io.Reader;
import java.io.StringReader;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.List;
import java.util.Locale;
import java.util.Properties;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameter;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcAssert.assertResultSets;
public abstract class CsvSpecBaseTest extends SpecBaseTest {
@Parameter(3)
public CsvFragment fragment;
protected static List<Object[]> readScriptSpec(String url) throws Exception {
return SpecBaseTest.readScriptSpec(url, new CsvSpecParser());
}
@Test
public void testQuery() throws Throwable {
try (Connection csv = CsvInfraSuite.csvCon(fragment.asProps(), fragment.reader).get();
Connection es = CsvInfraSuite.esCon().get()) {
ResultSet expected, actual;
try {
// pass the testName as table for debugging purposes (in case the underlying reader is missing)
expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY).executeQuery("SELECT * FROM " + testName);
// trigger data loading for type inference
expected.beforeFirst();
actual = es.createStatement().executeQuery(fragment.query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw new RuntimeException(errorMessage(th), th);
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, fragment.query, th.getMessage());
}
}
class CsvSpecParser implements SpecBaseTest.Parser {
private final StringBuilder data = new StringBuilder();
private CsvFragment fragment;
@Override
public Object parse(String line) {
// beginning of the section
if (fragment == null) {
// pick up the query
fragment = new CsvFragment();
fragment.query = line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
else {
// read CSV header
// if (fragment.columnNames == null) {
// fragment.columnNames = line;
// }
// read data
if (line.startsWith(";")) {
CsvFragment f = fragment;
f.reader = new StringReader(data.toString());
// clean-up
fragment = null;
data.setLength(0);
return f;
}
else {
data.append(line);
data.append("\r\n");
}
}
return null;
}
}
class CsvFragment {
String query;
String columnNames;
List<String> columnTypes;
Reader reader;
private static final Properties DEFAULT = new Properties();
static {
DEFAULT.setProperty("charset", "UTF-8");
// trigger auto-detection
DEFAULT.setProperty("columnTypes", "");
DEFAULT.setProperty("separator", "|");
DEFAULT.setProperty("trimValues", "true");
}
Properties asProps() {
// p.setProperty("suppressHeaders", "true");
// p.setProperty("headerline", columnNames);
return DEFAULT;
}
}

View File

@ -3,7 +3,10 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.xpack.sql.jdbc.csv.CsvSpecIntegrationTest;
import org.relique.io.TableReader;
import java.io.Reader;
import java.sql.Connection;
@ -11,13 +14,11 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import org.relique.io.TableReader;
public class CsvSpecTableReader implements TableReader {
@Override
public Reader getReader(Statement statement, String tableName) throws SQLException {
Reader reader = CsvInfraSuite.CSV_READERS.remove(statement.getConnection());
Reader reader = CsvSpecIntegrationTest.CSV_READERS.remove(statement.getConnection());
if (reader == null) {
throw new RuntimeException("Cannot find reader for test " + tableName);
}

View File

@ -1,50 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
import java.sql.Connection;
import java.util.function.Supplier;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsDataLoader;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsJdbcServer;
import org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcTemplate;
import org.junit.ClassRule;
import static org.junit.Assert.assertNotNull;
public class EsInfra {
//
// REMOTE ACCESS
//
private static boolean REMOTE = true;
@ClassRule
public static EsJdbcServer ES_JDBC_SERVER = new EsJdbcServer(REMOTE, false);
private static JdbcTemplate ES_JDBC;
public static void setupDB() throws Exception {
//ES_CON = new JdbcTemplate(ES_JDBC_SERVER);
if (!REMOTE) {
setupES();
}
}
private static void setupES() throws Exception {
EsDataLoader.loadData();
}
public static Supplier<Connection> esCon() {
return ES_JDBC_SERVER;
}
public static JdbcTemplate es() {
assertNotNull("ES connection null - make sure the suite is ran", ES_JDBC);
return ES_JDBC;
}
}

View File

@ -0,0 +1,66 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver;
import org.junit.rules.ExternalResource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import static org.junit.Assert.assertNotNull;
public class EsJdbcServer extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
private JdbcHttpServer server;
private String jdbcUrl;
private JdbcDriver driver;
private final Properties properties;
public EsJdbcServer() {
this(false);
}
public EsJdbcServer(boolean debug) {
properties = new Properties();
if (debug) {
properties.setProperty("debug", "true");
}
}
@Override
protected void before() throws Throwable {
server = new JdbcHttpServer(TestUtils.client());
driver = new JdbcDriver();
server.start(0);
jdbcUrl = server.url();
System.out.println("Started JDBC Server at " + jdbcUrl);
}
@Override
protected void after() {
server.stop();
server = null;
System.out.println("Stopped JDBC Server at " + jdbcUrl);
}
public Client client() {
assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", driver);
return server.client();
}
@Override
public Connection get() throws SQLException {
assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", driver);
return driver.connect(jdbcUrl, properties);
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.compare;
package org.elasticsearch.xpack.sql.jdbc.framework;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
@ -14,7 +14,6 @@ import java.time.Instant;
import java.util.Locale;
import java.util.TimeZone;
import static org.elasticsearch.xpack.sql.jdbc.compare.CompareToH2BaseTestCase.UTC_FORMATTER;
import static org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcUtils.nameOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@ -85,8 +84,8 @@ public class JdbcAssert {
* locale and time zone for date functions.
*/
msg += " locale is [" + Locale.getDefault() + "] and time zone is [" + TimeZone.getDefault() + "]";
expectedObject = UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) expectedObject).getTime()));
actualObject = UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) actualObject).getTime()));
expectedObject = TestUtils.UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) expectedObject).getTime()));
actualObject = TestUtils.UTC_FORMATTER.format(Instant.ofEpochMilli(((Timestamp) actualObject).getTime()));
// NOCOMMIT look at ResultSet.getTimestamp(int, Calendar)
}

View File

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Response;
import org.elasticsearch.xpack.sql.test.server.ProtoHttpServer;
public class JdbcHttpServer extends ProtoHttpServer<Response> {
public JdbcHttpServer(Client client) {
super(client, new SqlProtoHandler(client), "/jdbc/", "sql/");
}
@Override
public String url() {
return "jdbc:es://" + super.url();
}
}

View File

@ -5,47 +5,37 @@
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.painless.PainlessPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver;
import org.elasticsearch.xpack.sql.jdbc.util.IOUtils;
import org.elasticsearch.xpack.sql.net.client.SuppressForbidden;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.elasticsearch.xpack.sql.util.StringUtils;
import org.junit.rules.ExternalResource;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collection;
import java.util.TimeZone;
import java.util.function.Function;
import static java.util.Collections.emptySet;
import static java.util.Collections.singletonMap;
import static org.apache.lucene.util.LuceneTestCase.createTempDir;
import static org.apache.lucene.util.LuceneTestCase.random;
import static org.elasticsearch.test.ESTestCase.randomBoolean;
import static org.elasticsearch.test.ESTestCase.randomLong;
public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
static {
// Initialize the jdbc driver
JdbcDriver.jdbcMajorVersion();
}
private static InternalTestCluster internalTestCluster;
/**
* Hack to run an {@link InternalTestCluster} if this is being run
@ -53,13 +43,15 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
* debugging. Note that this doesn't work if the security manager is
* enabled.
*/
@BeforeClass
public class LocalEsCluster extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
private InternalTestCluster internalTestCluster;
private RestClient client;
private String serverAddress = StringUtils.EMPTY;
@Override
@SuppressForbidden(reason = "it is a hack anyway")
public static void startInternalTestClusterIfNeeded() throws IOException, InterruptedException {
if (System.getProperty("tests.rest.cluster") != null) {
// Nothing to do, using an external Elasticsearch node.
return;
}
protected void before() throws Throwable {
long seed = randomLong();
String name = InternalTestCluster.clusterName("", seed);
NodeConfigurationSource config = new NodeConfigurationSource() {
@ -92,8 +84,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
return Arrays.asList(Netty4Plugin.class, XPackPlugin.class, PainlessPlugin.class);
}
};
internalTestCluster = new InternalTestCluster(seed, createTempDir(), false, true, 1, 1, name, config, 0, randomBoolean(), "",
emptySet(), Function.identity());
internalTestCluster = new InternalTestCluster(seed, createTempDir(), false, true, 1, 1, name, config, 0, randomBoolean(), "", emptySet(), Function.identity());
internalTestCluster.beforeTest(random(), 0);
internalTestCluster.ensureAtLeastNumDataNodes(1);
InetSocketAddress httpBound = internalTestCluster.httpAddresses()[0];
@ -101,32 +92,36 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
try {
System.setProperty("tests.rest.cluster", http);
} catch (SecurityException e) {
throw new RuntimeException(
"Failed to set system property required for tests. Security manager must be disabled to use this hack.", e);
}
throw new RuntimeException("Failed to set system property required for tests. Security manager must be disabled to use this hack.", e);
}
@AfterClass
public static void shutDownInternalTestClusterIfNeeded() {
client = TestUtils.restClient(httpBound.getAddress());
// load data
TestUtils.loadDatasetInEs(client);
serverAddress = httpBound.getAddress().getHostAddress();
}
@Override
protected void after() {
serverAddress = StringUtils.EMPTY;
if (internalTestCluster == null) {
return;
}
internalTestCluster.close();
IOUtils.close(client);
IOUtils.close(internalTestCluster);
}
protected JdbcTemplate j;
@Before
public void setupJdbcTemplate() throws Exception {
j = new JdbcTemplate(() -> DriverManager.getConnection(
"jdbc:es://" + System.getProperty("tests.rest.cluster") + "/?time_zone=" + TimeZone.getDefault().getID()));
public RestClient client() {
return client;
}
protected void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
body.accept(builder);
builder.endObject();
HttpEntity doc = new StringEntity(builder.string(), ContentType.APPLICATION_JSON);
client().performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc);
public String address() {
return serverAddress;
}
@Override
public Connection get() throws SQLException {
return DriverManager.getConnection("jdbc:es://" + serverAddress);
}
}

View File

@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.common.CheckedSupplier;
import org.h2.Driver;
import org.junit.rules.ExternalResource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
public class LocalH2 extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
private final Driver driver = Driver.load();
// add any defaults in here
private final Properties DEFAULTS = new Properties();
private final String url;
// in-memory H2 keeps the db alive as long as a connection is open
private Connection keepAlive;
/*
* The syntax on the connection string is fairly particular:
* mem:; creates an anonymous database in memory. The `;` is
* technically the separator that comes after the name.
* DATABASE_TO_UPPER=false turns *off* H2's Oracle-like habit
* of upper-casing everything that isn't quoted.
* ALIAS_COLUMN_NAME=true turns *on* returning alias names in
* result set metadata which is what most DBs do except
* for MySQL and, by default, H2. Our jdbc driver does it.
*/
public LocalH2() {
this.url = "jdbc:H2:mem:essql;DATABASE_TO_UPPER=false;ALIAS_COLUMN_NAME=true";
}
@Override
protected void before() throws Throwable {
keepAlive = get();
//NOCOMMIT: check timezone issue
keepAlive.createStatement().executeQuery("RUNSCRIPT FROM 'classpath:h2-setup.sql'");
}
@Override
protected void after() {
try {
keepAlive.close();
} catch (SQLException ex) {
// ignore failure on close
}
}
@Override
public Connection get() throws SQLException {
return driver.connect(url, DEFAULTS);
}
}
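A minimal usage sketch (test class and query are illustrative): LocalH2 doubles as a JUnit class rule and a connection supplier, so a test can open fresh connections to the same in-memory database that before() initialized via h2-setup.sql:
import java.sql.Connection;
import java.sql.ResultSet;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import org.junit.ClassRule;
import org.junit.Test;
public class ExampleH2IT {
    @ClassRule
    public static LocalH2 H2 = new LocalH2();
    @Test
    public void countEmployees() throws Exception {
        // new connection to the same named in-memory db kept alive by the rule
        try (Connection h2 = H2.get()) {
            ResultSet rs = h2.createStatement().executeQuery("SELECT COUNT(*) FROM \"test_emp.emp\"");
            // assertions on rs go here
        }
    }
}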

View File

@ -3,81 +3,106 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver;
import org.junit.Assert;
import org.junit.ClassRule;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.elasticsearch.common.Strings;
import org.junit.Assert;
import org.junit.runners.Parameterized.Parameter;
import static java.lang.String.format;
public abstract class SpecBaseTest {
@Parameter(0)
public String testName;
@Parameter(1)
public Integer lineNumber;
@Parameter(2)
public Path source;
interface Parser {
Object parse(String line);
public abstract class SpecBaseIntegrationTestCase extends ESTestCase {
static {
// Initialize the jdbc driver
JdbcDriver.jdbcMajorVersion();
}
protected final String groupName;
protected final String testName;
protected final Integer lineNumber;
protected final Path source;
@ClassRule
public static EsJdbcServer ES = new EsJdbcServer();
public SpecBaseIntegrationTestCase(String groupName, String testName, Integer lineNumber, Path source) {
this.groupName = groupName;
this.testName = testName;
this.lineNumber = lineNumber;
this.source = source;
}
public Connection esCon() throws Exception {
return ES.get();
}
protected Throwable reworkException(Throwable th) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
redone[0] = new StackTraceElement(getClass().getName(), groupName + "." + testName, source.getFileName().toString(), lineNumber);
th.setStackTrace(redone);
return th;
}
//
// spec reader
//
// returns the test name, its line location, its source and the custom object (produced by the test parser)
protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception {
Path source = Paths.get(SpecBaseTest.class.getResource(url).toURI());
Path source = Paths.get(TestUtils.class.getResource(url).toURI());
String groupName = source.getFileName().toString();
List<String> lines = Files.readAllLines(source);
Map<String, Integer> testNames = new LinkedHashMap<>();
List<Object[]> pairs = new ArrayList<>();
String name = null;
String testName = null;
for (int i = 0; i < lines.size(); i++) {
String line = lines.get(i).trim();
// ignore comments
if (!line.isEmpty() && !line.startsWith("//")) {
// parse test name
if (name == null) {
if (testName == null) {
if (testNames.keySet().contains(line)) {
throw new IllegalStateException(format(Locale.ROOT, "Duplicate test name '%s' at line %d (previously seen at line %d)", line, i, testNames.get(line)));
}
else {
name = Strings.capitalize(line);
testNames.put(name, Integer.valueOf(i));
testName = Strings.capitalize(line);
testNames.put(testName, Integer.valueOf(i));
}
}
else {
Object result = parser.parse(line);
// add the object only once the parser is done - otherwise keep feeding it lines
if (result != null) {
pairs.add(new Object[] { name, Integer.valueOf(i), source, result });
name = null;
pairs.add(new Object[] { groupName, testName, Integer.valueOf(i), source, result });
testName = null;
}
}
}
}
Assert.assertNull("Cannot find spec for test " + name, name);
Assert.assertNull("Cannot find spec for test " + testName, testName);
return pairs;
}
Throwable reworkException(Throwable th) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
redone[0] = new StackTraceElement(getClass().getName(), testName, source.getFileName().toString(), lineNumber);
th.setStackTrace(redone);
return th;
public interface Parser {
Object parse(String line);
}
}
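The net effect of reworkException above is to prepend a synthetic frame so failures point at the spec file rather than the Java test. Assuming a sql-spec entry named myTest at line 12 of select.sql-spec, the reported trace would start with something like:
at org.elasticsearch.xpack.sql.jdbc.h2.SqlSpecIntegrationTest.select.sql-spec.MyTest(select.sql-spec:12)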

View File

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
import java.sql.Connection;
import java.util.function.Supplier;
import org.elasticsearch.xpack.sql.jdbc.integration.util.H2;
import org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcTemplate;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import static org.junit.Assert.assertNotNull;
@RunWith(Suite.class)
public abstract class SqlInfraSuite extends EsInfra {
private static String REMOTE_H2 = "jdbc:org.elasticsearch.xpack.sql.jdbc.h2:tcp://localhost/./essql";
@ClassRule
public static H2 H2 = new H2(null);
private static JdbcTemplate H2_JDBC;
@BeforeClass
public static void setupDB() throws Exception {
H2_JDBC = new JdbcTemplate(H2);
setupH2();
EsInfra.setupDB();
}
private static void setupH2() throws Exception {
h2().execute("RUNSCRIPT FROM 'classpath:org/elasticsearch/sql/jdbc/integration/org.elasticsearch.xpack.sql.jdbc.h2-setup.sql'");
}
public static Supplier<Connection> h2Con() {
return H2;
}
public static JdbcTemplate h2() {
assertNotNull("H2 connection null - make sure the suite is ran", H2_JDBC);
return H2_JDBC;
}
}

View File

@ -0,0 +1,38 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import com.sun.net.httpserver.HttpExchange;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.sql.TestUtils;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ProtoUtils;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Request;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Response;
import org.elasticsearch.xpack.sql.plugin.jdbc.server.JdbcServer;
import org.elasticsearch.xpack.sql.plugin.jdbc.server.JdbcServerProtoUtils;
import org.elasticsearch.xpack.sql.test.server.ProtoHandler;
import java.io.DataInput;
import java.io.IOException;
import static org.elasticsearch.action.ActionListener.wrap;
class SqlProtoHandler extends ProtoHandler<Response> {
private final JdbcServer server;
SqlProtoHandler(Client client) {
super(client, ProtoUtils::readHeader, JdbcServerProtoUtils::write);
this.server = new JdbcServer(TestUtils.planExecutor(client), clusterName, () -> info.getNode().getName(), info.getVersion(), info.getBuild());
}
@Override
protected void handle(HttpExchange http, DataInput in) throws IOException {
Request req = ProtoUtils.readRequest(in);
server.handle(req, wrap(resp -> sendHttpResponse(http, resp), ex -> fail(http, ex)));
}
}

View File

@ -1,58 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util.framework;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.List;
import java.util.Locale;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameter;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcAssert.assertResultSets;
public abstract class SqlSpecBaseTest extends SpecBaseTest {
@Parameter(3)
public String query;
protected static List<Object[]> readScriptSpec(String url) throws Exception {
return SpecBaseTest.readScriptSpec(url, new SqlSpecParser());
}
@Test
public void testQuery() throws Throwable {
// H2 resultset
try (Connection h2 = SqlInfraSuite.h2Con().get();
Connection es = SqlInfraSuite.esCon().get()) {
ResultSet expected, actual;
try {
expected = h2.createStatement().executeQuery(query);
actual = es.createStatement().executeQuery(query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw reworkException(new RuntimeException(errorMessage(th)));
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, query, th.getMessage());
}
}
class SqlSpecParser implements SpecBaseTest.Parser {
@Override
public Object parse(String line) {
return line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
}

View File

@ -3,127 +3,73 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.compare;
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.sql.jdbc.JdbcIntegrationTestCase;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.elasticsearch.xpack.sql.jdbc.h2.SqlSpecIntegrationTest;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.sql.jdbc.compare.JdbcAssert.assertResultSets;
import static org.junit.Assert.assertEquals;
public abstract class TestUtils {
/**
* Compares Elasticsearch's JDBC driver to H2.
*/
public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
static final DateTimeFormatter UTC_FORMATTER = DateTimeFormatter.ISO_DATE_TIME
.withLocale(Locale.ROOT)
.withZone(ZoneId.of("UTC"));
public final String queryName;
public final String query;
public final Integer lineNumber;
public final Path source;
public static RestClient restClient(String host, int port) {
return RestClient.builder(new HttpHost(host, port)).build();
protected static List<Object[]> readScriptSpec(String spec) throws Exception {
String url = "/" + spec + ".spec";
URL resource = CompareToH2BaseTestCase.class.getResource(url);
if (resource == null) {
throw new IllegalArgumentException("Couldn't find [" + url + "]");
}
Path source = PathUtils.get(resource.toURI());
List<String> lines = Files.readAllLines(source);
Map<String, Integer> testNames = new LinkedHashMap<>();
List<Object[]> ctorArgs = new ArrayList<>();
String name = null;
StringBuilder query = new StringBuilder();
for (int i = 0; i < lines.size(); i++) {
String line = lines.get(i).trim();
// ignore comments
if (!line.isEmpty() && !line.startsWith("//")) {
if (name == null) {
if (testNames.keySet().contains(line)) {
throw new IllegalStateException("Duplicate test name [" + line
+ "] at line [" + i + "] (previously seen at line [" + testNames.get(line) + "])");
} else {
name = line;
testNames.put(name, Integer.valueOf(i));
}
} else {
if (line.endsWith(";")) {
query.append(line.substring(0, line.length() - 1));
}
ctorArgs.add(new Object[] { name, query.toString(), Integer.valueOf(i), source });
name = null;
query.setLength(0);
}
}
}
assertNull("Cannot find query for test " + name, name);
return ctorArgs;
}
public CompareToH2BaseTestCase(String queryName, String query, Integer lineNumber, Path source) {
this.queryName = queryName;
this.query = query;
this.lineNumber = lineNumber;
this.source = source;
public static RestClient restClient(InetAddress address) {
return RestClient.builder(new HttpHost(address)).build();
}
public void testQuery() throws Throwable {
/*
* The syntax on the connection string is fairly particular:
* mem:; creates an anonymous database in memory. The `;` is
* technically the separator that comes after the name.
* DATABASE_TO_UPPER=false turns *off* H2's Oracle-like habit
* of upper-casing everything that isn't quoted.
* ALIAS_COLUMN_NAME=true turns *on* returning alias names in
* result set metadata which is what most DBs do except
* for MySQL and, by default, H2. Our jdbc driver does it.
* RUNSCRIPT FROM 'classpath:/h2-setup.sql' initializes the
* database with test data.
*/
try (Connection h2 = DriverManager.getConnection(
"jdbc:h2:mem:;DATABASE_TO_UPPER=false;ALIAS_COLUMN_NAME=true;INIT=RUNSCRIPT FROM 'classpath:/h2-setup.sql'")) {
fillH2(h2);
try (PreparedStatement h2Query = h2.prepareStatement(query);
ResultSet expected = h2Query.executeQuery()) {
setupElasticsearchIndex();
j.query(query, actual -> {
assertResultSets(expected, actual);
return null;
});
};
}
public static Client client() {
return new PreBuiltTransportClient(Settings.EMPTY)
.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), 9300));
}
private void setupElasticsearchIndex() throws Exception {
public static void index(RestClient client, String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder().startObject();
body.accept(builder);
builder.endObject();
HttpEntity doc = new StringEntity(builder.string(), ContentType.APPLICATION_JSON);
client.performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc);
}
public static void loadDatasetInEs(RestClient client) throws Exception {
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
createIndex.startObject("settings"); {
createIndex.field("number_of_shards", 1);
@ -144,7 +90,7 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
createIndex.endObject();
}
createIndex.endObject().endObject();
client().performRequest("PUT", "/emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
client.performRequest("PUT", "/emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
StringBuilder bulk = new StringBuilder();
csvToLines("employees", (titles, fields) -> {
@ -158,18 +104,17 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
}
bulk.append("}\n");
});
client().performRequest("POST", "/emp/emp/_bulk", singletonMap("refresh", "true"),
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
client.performRequest("POST", "/emp/emp/_bulk", singletonMap("refresh", "true"), new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
}
/**
* Fill the h2 database. Note that we have to parse the CSV ourselves
* because h2 interprets the CSV using the default locale which is
* Fill the H2 database. Note that we have to parse the CSV ourselves
* because H2 interprets the CSV using the default locale which is
* randomized by the testing framework. Because some locales (th-TH,
* for example) parse dates in very different ways we parse using the
* root locale.
*/
private void fillH2(Connection h2) throws Exception {
public static void loadDatasetInH2(Connection h2) throws Exception {
csvToLines("employees", (titles, fields) -> {
StringBuilder insert = new StringBuilder("INSERT INTO \"emp.emp\" (");
for (int t = 0; t < titles.size(); t++) {
@ -206,10 +151,9 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
});
}
private void csvToLines(String name,
CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception {
private static void csvToLines(String name, CheckedBiConsumer<List<String>, List<String>, Exception> consumeLine) throws Exception {
String location = "/" + name + ".csv";
URL dataSet = CompareToH2BaseTestCase.class.getResource(location);
URL dataSet = SqlSpecIntegrationTest.class.getResource(location);
if (dataSet == null) {
throw new IllegalArgumentException("Can't find [" + location + "]");
}
@ -222,4 +166,18 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
consumeLine.accept(titles, Arrays.asList(lines.get(l).split(",")));
}
}
static Throwable reworkException(Throwable th, Class<?> testSuite, String testName, Path source, int lineNumber) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
redone[0] = new StackTraceElement(testSuite.getName(), testName, source.getFileName().toString(), lineNumber);
th.setStackTrace(redone);
return th;
}
}
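A hypothetical one-liner using the index helper above; it PUTs /test/doc/1 with refresh=true (index name and field values are illustrative):
// body is an XContentBuilder; field(...) returns the builder, so calls chain
TestUtils.index(client, "test", body -> body.field("first_name", "John").field("gender", "M"));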

View File

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for aggregations created by {@code GROUP BY}.
*/
public class AggIT extends CompareToH2BaseTestCase {
public AggIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("agg");
}
}

View File

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for functions related to dates and times.
*/
public class DateTimeIT extends CompareToH2BaseTestCase {
public DateTimeIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("datetime");
}
}

View File

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for filters added by {@code WHERE} clauses.
*/
public class FilterIT extends CompareToH2BaseTestCase {
public FilterIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("filter");
}
}

View File

@ -1,21 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
public class MathIT extends CompareToH2BaseTestCase {
public MathIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("math");
}
}

View File

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for basic {@code SELECT} statements without {@code WHERE} or {@code GROUP BY} or functions.
*/
public class SelectIT extends CompareToH2BaseTestCase {
public SelectIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("select");
}
}

View File

@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.h2;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase.Parser;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import org.junit.ClassRule;
import org.junit.Test;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Locale;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
public class SqlSpecIntegrationTest extends SpecBaseIntegrationTestCase {
private String query;
@ClassRule
public static LocalH2 H2 = new LocalH2();
@ParametersFactory(shuffle = false, argumentFormatting = "name=%1s")
public static List<Object[]> readScriptSpec() throws Exception {
SqlSpecParser parser = new SqlSpecParser();
return CollectionUtils.combine(
readScriptSpec("/select.sql-spec", parser),
readScriptSpec("/filter.sql-spec", parser),
readScriptSpec("/datetime.sql-spec", parser),
readScriptSpec("/math.sql-spec", parser),
readScriptSpec("/agg.sql-spec", parser));
}
private static class SqlSpecParser implements Parser {
@Override
public Object parse(String line) {
return line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
}
public Connection h2Con() throws SQLException {
return H2.get();
}
public SqlSpecIntegrationTest(String groupName, @Name("testName") String testName, Integer lineNumber, Path source, String query) {
super(groupName, testName, lineNumber, source);
this.query = query;
}
@Test
public void testQuery() throws Throwable {
// H2 resultset
try (Connection h2 = h2Con();
Connection es = esCon()) {
ResultSet expected, actual;
try {
expected = h2.createStatement().executeQuery(query);
actual = es.createStatement().executeQuery(query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw reworkException(new RuntimeException(errorMessage(th)));
}
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, query, th.getMessage());
}
}
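For reference, a sql-spec entry as consumed by readScriptSpec and the SqlSpecParser above is just a test name line followed by a single query line; a minimal hypothetical entry:
myCount
SELECT COUNT(*) FROM "test_emp.emp";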

View File

@ -0,0 +1,107 @@
//
// Commands
//
// SHOW_FUNCTIONS
showFunctions
SHOW FUNCTIONS;
name | type
AVG |AGGREGATE
COUNT |AGGREGATE
MAX |AGGREGATE
MIN |AGGREGATE
SUM |AGGREGATE
DAY_OF_MONTH |SCALAR
DAY |SCALAR
DOM |SCALAR
DAY_OF_WEEK |SCALAR
DOW |SCALAR
DAY_OF_YEAR |SCALAR
DOY |SCALAR
HOUR_OF_DAY |SCALAR
HOUR |SCALAR
MINUTE_OF_DAY |SCALAR
MINUTE_OF_HOUR |SCALAR
MINUTE |SCALAR
SECOND_OF_MINUTE|SCALAR
SECOND |SCALAR
MONTH_OF_YEAR |SCALAR
MONTH |SCALAR
YEAR |SCALAR
ABS |SCALAR
ACOS |SCALAR
ASIN |SCALAR
ATAN |SCALAR
CBRT |SCALAR
CEIL |SCALAR
COS |SCALAR
COSH |SCALAR
DEGREES |SCALAR
E |SCALAR
EXP |SCALAR
EXPM1 |SCALAR
FLOOR |SCALAR
LOG |SCALAR
LOG10 |SCALAR
PI |SCALAR
RADIANS |SCALAR
ROUND |SCALAR
SIN |SCALAR
SINH |SCALAR
SQRT |SCALAR
TAN |SCALAR
;
showFunctionsWithExactMatch
SHOW FUNCTIONS LIKE 'ABS';
name | type
ABS |SCALAR
;
showFunctionsWithPatternWildcard
SHOW FUNCTIONS LIKE 'A%';
name | type
AVG |AGGREGATE
ABS |SCALAR
ACOS |SCALAR
ASIN |SCALAR
ATAN |SCALAR
;
showFunctionsWithPatternChar
SHOW FUNCTIONS LIKE 'A__';
name | type
AVG |AGGREGATE
ABS |SCALAR
;
showFunctionsWithPattern
SHOW FUNCTIONS '%DAY%';
name | type
DAY_OF_MONTH |SCALAR
DAY |SCALAR
DAY_OF_WEEK |SCALAR
DAY_OF_YEAR |SCALAR
HOUR_OF_DAY |SCALAR
MINUTE_OF_DAY |SCALAR
;
// DESCRIBE
describe
DESCRIBE "test_emp.emp";
column | type
birth_date |TIMESTAMP_WITH_TIMEZONE
emp_no |INTEGER
first_name |VARCHAR
gender |VARCHAR
hire_date |TIMESTAMP_WITH_TIMEZONE
last_name |VARCHAR
;

View File

@ -0,0 +1,53 @@
//
// Spec used for debugging a certain test (without having to alter the spec suite it might be part of)
//
debug
SHOW FUNCTIONS;
name | type
AVG |AGGREGATE
COUNT |AGGREGATE
MAX |AGGREGATE
MIN |AGGREGATE
SUM |AGGREGATE
DAY_OF_MONTH |SCALAR
DAY |SCALAR
DOM |SCALAR
DAY_OF_WEEK |SCALAR
DOW |SCALAR
DAY_OF_YEAR |SCALAR
DOY |SCALAR
HOUR_OF_DAY |SCALAR
HOUR |SCALAR
MINUTE_OF_DAY |SCALAR
MINUTE_OF_HOUR |SCALAR
MINUTE |SCALAR
SECOND_OF_MINUTE|SCALAR
SECOND |SCALAR
MONTH_OF_YEAR |SCALAR
MONTH |SCALAR
YEAR |SCALAR
ABS |SCALAR
ACOS |SCALAR
ASIN |SCALAR
ATAN |SCALAR
CBRT |SCALAR
CEIL |SCALAR
COS |SCALAR
COSH |SCALAR
DEGREES |SCALAR
E |SCALAR
EXP |SCALAR
EXPM1 |SCALAR
FLOOR |SCALAR
LOG |SCALAR
LOG10 |SCALAR
PI |SCALAR
RADIANS |SCALAR
ROUND |SCALAR
SIN |SCALAR
SINH |SCALAR
SQRT |SCALAR
TAN |SCALAR
;

View File

@ -0,0 +1,6 @@
//
// Spec used for debugging a certain test (without having to alter the spec suite it might be part of)
//
debug
SELECT YEAR(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "test_emp.emp" GROUP BY YEAR(birth_date) ORDER BY YEAR(birth_date) LIMIT 5;

View File

@ -0,0 +1,31 @@
//
// Full-text
//
simpleQueryAllFields
SELECT emp_no, first_name, gender, last_name FROM test_emp.emp WHERE QUERY('Baek fox') LIMIT 3;
emp_no | first_name | gender | last_name
10080 |Premal |M |Baek
;
simpleQueryDedicatedField
SELECT emp_no, first_name, gender, last_name FROM test_emp.emp WHERE QUERY('Man*', 'fields=last_name') LIMIT 5;
emp_no | first_name | gender | last_name
10096 |Jayson |M |Mandell
;
matchQuery
SELECT emp_no, first_name, gender, last_name FROM test_emp.emp WHERE MATCH(first_name, 'Erez');
emp_no | first_name | gender | last_name
10076 |Erez |F |Ritzmann
;
multiMatchQuery
SELECT emp_no, first_name, gender, last_name FROM test_emp.emp WHERE MATCH('first_name,last_name', 'Morton', 'type=best_fields;default_operator=OR');
emp_no | first_name | gender | last_name
10095 |Hilari |M |Morton
;

View File

@ -1,7 +1,9 @@
CREATE TABLE "emp.emp" ("birth_date" TIMESTAMP,
DROP TABLE IF EXISTS "test_emp.emp";
CREATE TABLE "test_emp.emp" ("birth_date" TIMESTAMP,
"emp_no" INT,
"first_name" VARCHAR(50),
"gender" VARCHAR(1),
"hire_date" TIMESTAMP,
"last_name" VARCHAR(50)
);
)
AS SELECT * FROM CSVREAD('classpath:/employees.csv');