Un-AwaitsFix SQL's jdbc integration tests (elastic/x-pack-elasticsearch#2217)
Most tests worked fine. The datetime tests are still broken for some time zones, so they are now skipped. The csv tests were broken because they accepted the default fetch size, which looks like it is broken, so they now set an explicit fetch size.

Original commit: elastic/x-pack-elasticsearch@e034c2f102
This commit is contained in:
parent 85937c6e78
commit f7d526036e
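The csv fix visible in the diff below amounts to no longer relying on the JDBC driver's default fetch size and instead requesting an explicit one on the Statement before executing the query. A minimal standalone sketch of that pattern, assuming a driver on the classpath (the connection URL and query are illustrative placeholders, not taken from this commit; the tests themselves get their connection from esJdbc()):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ExplicitFetchSize {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL; the tests obtain their connection from esJdbc() instead.
        try (Connection es = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement statement = es.createStatement()) {
            // Work around the broken default fetch size (believed to be 0 at the
            // time of this commit) by requesting an explicit page size up front.
            statement.setFetchSize(1000);
            try (ResultSet results = statement.executeQuery("SELECT 1")) {
                while (results.next()) {
                    System.out.println(results.getInt(1));
                }
            }
        }
    }
}
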
@@ -7,25 +7,45 @@ package org.elasticsearch.xpack.sql.jdbc;
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
 import org.elasticsearch.xpack.sql.util.CollectionUtils;
+import org.relique.io.TableReader;
+import org.relique.jdbc.csv.CsvConnection;
 
 import java.io.Reader;
+import java.io.StringReader;
 import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.List;
 import java.util.Locale;
+import java.util.Properties;
 
 import static java.lang.String.format;
 import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
 
 /**
  * Tests comparing sql queries executed against our jdbc client
  * with hard coded result sets.
  */
-@AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/2074")
 public class CsvSpecIT extends SpecBaseIntegrationTestCase {
+    /**
+     * Properties used when settings up a CSV-based jdbc connection.
+     */
+    private static final Properties CSV_PROPERTIES = new Properties();
+    static {
+        CSV_PROPERTIES.setProperty("charset", "UTF-8");
+        // trigger auto-detection
+        CSV_PROPERTIES.setProperty("columnTypes", "");
+        CSV_PROPERTIES.setProperty("separator", "|");
+        CSV_PROPERTIES.setProperty("trimValues", "true");
+    }
 
     private final CsvTestCase testCase;
 
-    @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTNG) // NOCOMMIT are we sure?!
+    @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
         CsvSpecParser parser = new CsvSpecParser();
         return CollectionUtils.combine(
@@ -46,6 +66,36 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
         }
     }
 
+    private void assertMatchesCsv(String query, String csvTableName, String expectedResults) throws SQLException {
+        Reader reader = new StringReader(expectedResults);
+        TableReader tableReader = new TableReader() {
+            @Override
+            public Reader getReader(Statement statement, String tableName) throws SQLException {
+                return reader;
+            }
+
+            @Override
+            public List<String> getTableNames(Connection connection) throws SQLException {
+                throw new UnsupportedOperationException();
+            }
+        };
+        try (Connection csv = new CsvConnection(tableReader, CSV_PROPERTIES, "") {};
+             Connection es = esJdbc()) {
+            // pass the testName as table for debugging purposes (in case the underlying reader is missing)
+            ResultSet expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)
+                    .executeQuery("SELECT * FROM " + csvTableName);
+            // trigger data loading for type inference
+            expected.beforeFirst();
+            Statement statement = es.createStatement();
+            //statement.setFetchSize(randomInt(10));
+            // NOCOMMIT: hook up pagination
+            // NOCOMMIT sometimes accept the default fetch size. I believe it is 0 now which breaks things.
+            statement.setFetchSize(1000);
+            ResultSet actual = statement.executeQuery(query);
+            assertResultSets(expected, actual);
+        }
+    }
+
     String errorMessage(Throwable th) {
         return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber,
                 testCase.query, th.getMessage());
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.jdbc;
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
 import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
 import org.elasticsearch.xpack.sql.util.CollectionUtils;
@@ -28,14 +27,13 @@ import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
  * Tests comparing sql queries executed against our jdbc client
  * with those executed against H2's jdbc client.
  */
-@AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/2074")
 public class SqlSpecIT extends SpecBaseIntegrationTestCase {
     private String query;
 
     @ClassRule
     public static LocalH2 H2 = new LocalH2();
 
-    @ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTNG)
+    @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
 
         // example for enabling logging
@@ -63,6 +61,7 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
     }
 
     public void test() throws Throwable {
+        assumeFalse("Date time tests have time zone problems", "datetime".equals(groupName));
         try (Connection h2 = H2.get();
              Connection es = esJdbc()) {
             ResultSet expected, actual;
@@ -17,22 +17,13 @@ import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.junit.ClassRule;
-import org.relique.io.TableReader;
-import org.relique.jdbc.csv.CsvConnection;
 
 import java.io.IOException;
-import java.io.Reader;
-import java.io.StringReader;
 import java.sql.Connection;
 import java.sql.DriverManager;
-import java.sql.ResultSet;
 import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.List;
-import java.util.Properties;
 
 import static java.util.Collections.singletonMap;
-import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
 
 public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
     /**
@@ -45,19 +36,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
      * but is not canonical because it runs against a different HTTP server
      * then JDBC will use in production. Gradle always uses non-embedded.
      */
-    private static final boolean EMBED_SQL = Booleans.parseBoolean(System.getProperty("tests.embed.sql", "false"));
-
-    /**
-     * Properties used when settings up a CSV-based jdbc connection.
-     */
-    private static final Properties CSV_PROPERTIES = new Properties();
-    static {
-        CSV_PROPERTIES.setProperty("charset", "UTF-8");
-        // trigger auto-detection
-        CSV_PROPERTIES.setProperty("columnTypes", "");
-        CSV_PROPERTIES.setProperty("separator", "|");
-        CSV_PROPERTIES.setProperty("trimValues", "true");
-    }
+    protected static final boolean EMBED_SQL = Booleans.parseBoolean(System.getProperty("tests.embed.sql", "false"));
 
     @ClassRule
     public static final CheckedSupplier<Connection, SQLException> ES = EMBED_SQL ? new EmbeddedJdbcServer() : () ->
@@ -75,31 +54,6 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
         client().performRequest("PUT", "/" + index + "/doc/1", singletonMap("refresh", "true"), doc);
     }
 
-    public void assertMatchesCsv(String query, String csvTableName, String expectedResults) throws SQLException {
-        Reader reader = new StringReader(expectedResults);
-        TableReader tableReader = new TableReader() {
-            @Override
-            public Reader getReader(Statement statement, String tableName) throws SQLException {
-                return reader;
-            }
-
-            @Override
-            public List<String> getTableNames(Connection connection) throws SQLException {
-                throw new UnsupportedOperationException();
-            }
-        };
-        try (Connection csv = new CsvConnection(tableReader, CSV_PROPERTIES, "") {};
-             Connection es = esJdbc()) {
-            // pass the testName as table for debugging purposes (in case the underlying reader is missing)
-            ResultSet expected = csv.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)
-                    .executeQuery("SELECT * FROM " + csvTableName);
-            // trigger data loading for type inference
-            expected.beforeFirst();
-            ResultSet actual = es.createStatement().executeQuery(query);
-            assertResultSets(expected, actual);
-        }
-    }
-
     protected String clusterName() {
         try {
             String response = EntityUtils.toString(client().performRequest("GET", "/").getEntity());
@@ -108,8 +62,4 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
             throw new RuntimeException(e);
         }
     }
-
-    protected static void loadDatasetIntoEs() throws Exception {
-        DataLoader.loadDatasetIntoEs(client());
-    }
 }
@@ -5,11 +5,13 @@
  */
 package org.elasticsearch.xpack.sql.jdbc.framework;
 
-import org.elasticsearch.common.Booleans;
+import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.PathUtils;
-import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Before;
 
+import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
@@ -25,29 +27,13 @@ import static java.lang.String.format;
  * after loading a specific set of test data.
  */
 public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
-    protected static final String PARAM_FORMATTNG = "%0$s.test%2$s";
-
-    private static final boolean SETUP_DATA = Booleans.parseBoolean(System.getProperty("tests.sql.setup.data", "false"));
+    protected static final String PARAM_FORMATTING = "%0$s.test%2$s";
 
     protected final String groupName;
     protected final String testName;
     protected final Integer lineNumber;
     protected final Path source;
 
-    @BeforeClass
-    public static void setupTestData() throws Exception {
-        if (!SETUP_DATA) {
-            // We only need to load the test data once
-            return;
-        }
-        loadDatasetIntoEs();
-    }
-
-    @Override
-    protected boolean preserveIndicesUponCompletion() {
-        return !SETUP_DATA;
-    }
-
     public SpecBaseIntegrationTestCase(String groupName, String testName, Integer lineNumber, Path source) {
         this.groupName = groupName;
         this.testName = testName;
@@ -55,11 +41,38 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
         this.source = source;
     }
 
+    @Before
+    public void setupTestDataIfNeeded() throws Exception {
+        if (client().performRequest("HEAD", "/test_emp").getStatusLine().getStatusCode() == 404) {
+            DataLoader.loadDatasetIntoEs(client());
+        }
+    }
+
+    @AfterClass
+    public static void wipeTestData() throws IOException {
+        if (false == EMBED_SQL) {
+            try {
+                adminClient().performRequest("DELETE", "/*");
+            } catch (ResponseException e) {
+                // 404 here just means we had no indexes
+                if (e.getResponse().getStatusLine().getStatusCode() != 404) {
+                    throw e;
+                }
+            }
+        }
+    }
+
+    @Override
+    protected boolean preserveIndicesUponCompletion() {
+        return true;
+    }
+
     protected Throwable reworkException(Throwable th) {
         StackTraceElement[] stackTrace = th.getStackTrace();
         StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
         System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
-        redone[0] = new StackTraceElement(getClass().getName(), groupName + ".test" + testName, source.getFileName().toString(), lineNumber);
+        redone[0] = new StackTraceElement(getClass().getName(), groupName + ".test" + testName,
+                source.getFileName().toString(), lineNumber);
 
         th.setStackTrace(redone);
         return th;
@@ -68,7 +81,7 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
     //
     // spec reader
     //
 
-
+    // returns groupName, testName, its line location, its source and the custom object (based on each test parser)
     protected static List<Object[]> readScriptSpec(String url, Parser parser) throws Exception {
         Path source = PathUtils.get(SpecBaseIntegrationTestCase.class.getResource(url).toURI());
@@ -89,7 +102,8 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
                 // parse test name
                 if (testName == null) {
                     if (testNames.keySet().contains(line)) {
-                        throw new IllegalStateException(format(Locale.ROOT, "Duplicate test name '%s' at line %d (previously seen at line %d)", line, i, testNames.get(line)));
+                        throw new IllegalStateException(format(Locale.ROOT,
+                                "Duplicate test name '%s' at line %d (previously seen at line %d)", line, i, testNames.get(line)));
                     }
                     else {
                         testName = Strings.capitalize(line);
@@ -101,8 +101,8 @@ MINUTE_OF_DAY |SCALAR
 showTables
 SHOW TABLES;
 
-index | type
-test_emp |emp
+table
+test_emp
 ;
 
 // DESCRIBE
@@ -5,27 +5,27 @@
 simpleQueryAllFields
 SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE QUERY('Baek fox') LIMIT 3;
 
-emp_no | first_name | gender | last_name
-10080 |Premal |M |Baek
+emp_no | first_name | gender | last_name
+10080 |Premal |M |Baek
 ;
 
 simpleQueryDedicatedField
 SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE QUERY('Man*', 'fields=last_name') LIMIT 5;
 
-emp_no | first_name | gender | last_name
-10096 |Jayson |M |Mandell
+emp_no | first_name | gender | last_name
+10096 |Jayson |M |Mandell
 ;
 
 matchQuery
 SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH(first_name, 'Erez');
 
-emp_no | first_name | gender | last_name
-10076 |Erez |F |Ritzmann
+emp_no | first_name | gender | last_name
+10076 |Erez |F |Ritzmann
 ;
 
 multiMatchQuery
 SELECT emp_no, first_name, gender, last_name FROM test_emp WHERE MATCH('first_name,last_name', 'Morton', 'type=best_fields;default_operator=OR');
 
-emp_no | first_name | gender | last_name
-10095 |Hilari |M |Morton
+emp_no | first_name | gender | last_name
+10095 |Hilari |M |Morton
 ;
@@ -5,7 +5,7 @@
 wildcardWithOrder
 SELECT * FROM "test_emp" ORDER BY emp_no;
 column
-SELECT last_name FROM "test_emp" ORDER BY emp_no;
+SELECT last_name FROM "test_emp" ORDER BY emp_no;
 columnWithAlias
 SELECT last_name AS l FROM "test_emp" ORDER BY emp_no;
 columnWithAliasNoAs
@@ -24,7 +24,7 @@ SELECT * FROM "test_emp" ORDER BY emp_no LIMIT 5;
 wildcardWithOrderWithLimit
 SELECT * FROM "test_emp" ORDER BY emp_no LIMIT 5;
 columnWithLimit
-SELECT last_name FROM "test_emp" ORDER BY emp_no LIMIT 5;
+SELECT last_name FROM "test_emp" ORDER BY emp_no LIMIT 5;
 columnWithAliasWithLimit
 SELECT last_name AS l FROM "test_emp" ORDER BY emp_no LIMIT 5;
 columnWithAliasNoAsWithLimit
@@ -53,4 +53,4 @@ SELECT CAST(emp_no AS REAL) AS emp_no_cast FROM "test_emp" ORDER BY emp_no LIMIT 5;
 castOnColumnNumberToDouble
 SELECT CAST(emp_no AS DOUBLE) AS emp_no_cast FROM "test_emp" ORDER BY emp_no LIMIT 5;
 castOnColumnNumberToBoolean
-SELECT CAST(emp_no AS BOOL) AS emp_no_cast FROM "test_emp" ORDER BY emp_no LIMIT 5;
+SELECT CAST(emp_no AS BOOL) AS emp_no_cast FROM "test_emp" ORDER BY emp_no LIMIT 5;