Consolidate spec testing

Remove duplicated methods, add a template method plus logging of the
ES result set (for easier debugging).
Rename the CSV debug test and add one for the SQL spec.

Original commit: elastic/x-pack-elasticsearch@d2c46a2ed2
Costin Leau 2017-08-21 20:57:17 +03:00
parent a27c726f72
commit 18dccbc668
7 changed files with 161 additions and 123 deletions
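In outline, the consolidation moves the try/catch and the shared JDBC plumbing into the common base class (see the SpecBaseIntegrationTestCase diff at the end). A minimal sketch of the resulting template-method shape, simplified rather than copied verbatim from the diff — the class name SpecTestSketch is invented here, and the stack-trace rewriting inside reworkException is elided:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public abstract class SpecTestSketch {

    // Template method: subclasses only implement doTest(); failure handling is shared.
    public final void test() throws Throwable {
        try {
            doTest();
        } catch (AssertionError ae) {
            throw reworkException(ae);
        }
    }

    protected abstract void doTest() throws Throwable;

    // Shared JDBC plumbing, previously duplicated in CsvSpecIT and SqlSpecIT.
    protected ResultSet executeJdbcQuery(Connection con, String query) throws SQLException {
        Statement statement = con.createStatement();
        statement.setFetchSize(1000);
        return statement.executeQuery(query);
    }

    // Debug subclasses override this to have the Elasticsearch result set logged
    // when the base class compares result sets.
    protected boolean logEsResultSet() {
        return false;
    }

    private Throwable reworkException(Throwable th) {
        // In the real base class this rewrites the stack trace to point at the spec file.
        return th;
    }
}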

CsvSpecIT.java

@ -30,7 +30,6 @@ import java.util.List;
import java.util.Locale;
import java.util.Properties;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
import static org.hamcrest.Matchers.arrayWithSize;
/**
@ -42,7 +41,7 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
@ParametersFactory(argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
CsvSpecParser parser = new CsvSpecParser();
Parser parser = specParser();
return CollectionUtils.combine(
readScriptSpec("/command.csv-spec", parser),
readScriptSpec("/fulltext.csv-spec", parser),
@ -56,12 +55,9 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
this.testCase = testCase;
}
public void test() throws Throwable {
try {
@Override
protected final void doTest() throws Throwable {
assertMatchesCsv(testCase.query, testName, testCase.expectedResults);
} catch (AssertionError ae) {
throw reworkException(ae);
}
}
private void assertMatchesCsv(String query, String csvTableName, String expectedResults) throws SQLException, IOException {
@ -90,8 +86,8 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
.executeQuery("SELECT * FROM " + csvTableName);
// trigger data loading for type inference
expected.beforeFirst();
ResultSet actual = executeJdbcQuery(es, query);
assertResultSets(expected, actual);
ResultSet elasticResults = executeJdbcQuery(es, query);
assertResults(expected, elasticResults);
}
}
@ -147,19 +143,11 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
}
}
protected void assertResults(ResultSet expected, ResultSet actual) throws SQLException {
assertResultSets(expected, actual);
static CsvSpecParser specParser() {
return new CsvSpecParser();
}
private ResultSet executeJdbcQuery(Connection con, String query) throws SQLException {
Statement statement = con.createStatement();
//statement.setFetchSize(randomInt(10));
// NOCOMMIT: hook up pagination
statement.setFetchSize(1000);
return statement.executeQuery(query);
}
protected static class CsvSpecParser implements Parser {
private static class CsvSpecParser implements Parser {
private final StringBuilder data = new StringBuilder();
private CsvTestCase testCase;
@ -172,10 +160,6 @@ public class CsvSpecIT extends SpecBaseIntegrationTestCase {
testCase.query = line.endsWith(";") ? line.substring(0, line.length() - 1) : line;
}
else {
// read CSV header
// if (fragment.columnNames == null) {
// fragment.columnNames = line;
// }
// read data
if (line.startsWith(";")) {
testCase.expectedResults = data.toString();

DebugCsvSpecIT.java → DebugCsvSpec.java

@ -11,27 +11,23 @@ import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcTestUtils;
import java.nio.file.Path;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
@TestLogging("org.elasticsearch.xpack.sql:TRACE")
public class DebugCsvSpecIT extends CsvSpecIT {
@TestLogging(JdbcTestUtils.SQL_TRACE)
public class DebugCsvSpec extends CsvSpecIT {
@ParametersFactory(shuffle = false, argumentFormatting = SqlSpecIT.PARAM_FORMATTING) // NOCOMMIT are we sure?!
@ParametersFactory(shuffle = false, argumentFormatting = SqlSpecIT.PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
//JdbcTestUtils.sqlLogging();
CsvSpecParser parser = new CsvSpecParser();
Parser parser = specParser();
return readScriptSpec("/debug.csv-spec", parser);
}
public DebugCsvSpecIT(String groupName, String testName, Integer lineNumber, Path source, CsvTestCase testCase) {
public DebugCsvSpec(String groupName, String testName, Integer lineNumber, Path source, CsvTestCase testCase) {
super(groupName, testName, lineNumber, source, testCase);
}
@Override
public void assertResults(ResultSet expected, ResultSet actual) throws SQLException {
JdbcTestUtils.resultSetToLogger(logger, actual);
protected boolean logEsResultSet() {
return true;
}
}

DebugSqlSpec.java (new file)

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.sql.jdbc.framework.JdbcTestUtils;
import java.nio.file.Path;
import java.util.List;
@TestLogging(JdbcTestUtils.SQL_TRACE)
public class DebugSqlSpec extends SqlSpecIT {
@ParametersFactory(shuffle = false, argumentFormatting = SqlSpecIT.PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
Parser parser = specParser();
return readScriptSpec("/debug.sql-spec", parser);
}
public DebugSqlSpec(String groupName, String testName, Integer lineNumber, Path source, String query) {
super(groupName, testName, lineNumber, source, query);
}
@Override
protected boolean logEsResultSet() {
return true;
}
}

SqlSpecIT.java

@ -10,18 +10,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.xpack.sql.jdbc.framework.LocalH2;
import org.elasticsearch.xpack.sql.jdbc.framework.SpecBaseIntegrationTestCase;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import org.junit.Before;
import org.junit.ClassRule;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Locale;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.framework.JdbcAssert.assertResultSets;
/**
* Tests comparing sql queries executed against our jdbc client
@ -39,7 +34,7 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
// example for enabling logging
//JdbcTestUtils.sqlLogging();
Parser parser = parser();
Parser parser = specParser();
return CollectionUtils.combine(
readScriptSpec("/select.sql-spec", parser),
readScriptSpec("/filter.sql-spec", parser),
@ -63,7 +58,7 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
}
}
static SqlSpecParser parser() {
static SqlSpecParser specParser() {
return new SqlSpecParser();
}
@ -72,31 +67,21 @@ public class SqlSpecIT extends SpecBaseIntegrationTestCase {
this.query = query;
}
public void test() throws Throwable {
@Before
public void testDateTime() {
assumeFalse("Date time tests have time zone problems", "datetime".equals(groupName));
}
@Override
protected final void doTest() throws Throwable {
try (Connection h2 = H2.get();
Connection es = esJdbc()) {
ResultSet expected, actual;
try {
expected = executeJdbcQuery(h2);
actual = executeJdbcQuery(es);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(ae);
}
}
}
ResultSet expected, elasticResults;
expected = executeJdbcQuery(h2, query);
elasticResults = executeJdbcQuery(es, query);
private ResultSet executeJdbcQuery(Connection con) throws SQLException {
Statement statement = con.createStatement();
//statement.setFetchSize(randomInt(10));
// NOCOMMIT: hook up pagination
statement.setFetchSize(1000);
return statement.executeQuery(query);
}
String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", testName, source.getFileName().toString(), lineNumber, query, th.getMessage());
assertResults(expected, elasticResults);
}
}
}

JdbcAssert.java

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.logging.log4j.Logger;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
@ -24,14 +26,26 @@ public class JdbcAssert {
private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException {
assertResultSetMetadata(expected, actual);
assertResultSetData(expected, actual);
assertResultSets(expected, actual, null);
}
public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
assertResultSetMetadata(expected, actual, logger);
assertResultSetData(expected, actual, logger);
}
public static void assertResultSetMetadata(ResultSet expected, ResultSet actual) throws SQLException {
assertResultSetMetadata(expected, actual, null);
}
public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
ResultSetMetaData expectedMeta = expected.getMetaData();
ResultSetMetaData actualMeta = actual.getMetaData();
if (logger != null) {
JdbcTestUtils.logResultSetMetadata(actual, logger);
}
if (expectedMeta.getColumnCount() != actualMeta.getColumnCount()) {
List<String> expectedCols = new ArrayList<>();
for (int i = 1; i <= expectedMeta.getColumnCount(); i++) {
@ -74,7 +88,7 @@ public class JdbcAssert {
}
}
public static void assertResultSetData(ResultSet expected, ResultSet actual) throws SQLException {
public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
ResultSetMetaData metaData = expected.getMetaData();
int columns = metaData.getColumnCount();
@ -83,6 +97,10 @@ public class JdbcAssert {
assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());
count++;
if (logger != null) {
JdbcTestUtils.logResultSetCurrentData(actual, logger);
}
for (int column = 1; column <= columns; column++) {
Object expectedObject = expected.getObject(column);
Object actualObject = actual.getObject(column);

JdbcTestUtils.java

@ -6,63 +6,18 @@
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.xpack.sql.util.CollectionUtils;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Map;
import java.util.Map.Entry;
public abstract class JdbcTestUtils {
public static void sqlLogging() {
String t = "TRACE";
String d = "DEBUG";
public static final String SQL_TRACE = "org.elasticsearch.xpack.sql.test:INFO, org.elasticsearch.xpack.sql:TRACE";
Map<String, String> of = CollectionUtils.of("org.elasticsearch.xpack.sql.parser", t,
"org.elasticsearch.xpack.sql.analysis.analyzer", t,
"org.elasticsearch.xpack.sql.optimizer", t,
"org.elasticsearch.xpack.sql.rule", t,
"org.elasticsearch.xpack.sql.planner", t,
"org.elasticsearch.xpack.sql.execution.search", t);
for (Entry<String, String> entry : of.entrySet()) {
Loggers.setLevel(Loggers.getLogger(entry.getKey()), entry.getValue());
}
}
public static void printResultSet(ResultSet set) throws Exception {
Logger logger = Loggers.getLogger("org.elasticsearch.xpack.sql.test");
Loggers.setLevel(logger, "INFO");
ResultSetMetaData metaData = set.getMetaData();
// header
StringBuilder sb = new StringBuilder();
int colSize = 15;
for (int column = 1; column <= metaData.getColumnCount(); column++) {
String colName = metaData.getColumnName(column);
int size = colName.length();
if (column > 1) {
sb.append("|");
size++;
}
sb.append(colName);
for (int i = size; i < colSize; i++) {
sb.append(" ");
}
}
logger.info(sb.toString());
}
private static final int MAX_WIDTH = 20;
public static void resultSetToLogger(Logger log, ResultSet rs) throws SQLException {
public static void logResultSetMetadata(ResultSet rs, Logger logger) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
// header
StringBuilder sb = new StringBuilder();
StringBuilder column = new StringBuilder();
@ -81,11 +36,23 @@ public abstract class JdbcTestUtils {
}
int l = sb.length();
sb.append("\n");
logger.info(sb.toString());
sb.setLength(0);
for (int i = 0; i < l; i++) {
sb.append("=");
sb.append("-");
}
log.info(sb);
logger.info(sb.toString());
}
private static final int MAX_WIDTH = 20;
public static void logResultSetData(ResultSet rs, Logger log) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
StringBuilder sb = new StringBuilder();
StringBuilder column = new StringBuilder();
int columns = metaData.getColumnCount();
while (rs.next()) {
sb.setLength(0);
@ -100,6 +67,23 @@ public abstract class JdbcTestUtils {
}
}
public static void logResultSetCurrentData(ResultSet rs, Logger log) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
StringBuilder column = new StringBuilder();
int columns = metaData.getColumnCount();
StringBuilder sb = new StringBuilder();
for (int i = 1; i <= columns; i++) {
column.setLength(0);
if (i > 1) {
sb.append(" | ");
}
sb.append(trimOrPad(column.append(rs.getString(i))));
}
log.info(sb);
}
private static StringBuilder trimOrPad(StringBuilder buffer) {
if (buffer.length() > MAX_WIDTH) {
buffer.setLength(MAX_WIDTH - 1);

SpecBaseIntegrationTestCase.java

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc.framework;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
@ -14,6 +15,10 @@ import org.junit.Before;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@ -48,6 +53,11 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas
}
}
@Override
protected boolean preserveIndicesUponCompletion() {
return true;
}
@AfterClass
public static void wipeTestData() throws IOException {
if (false == EMBED_SQL) {
@ -62,12 +72,40 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas
}
}
@Override
protected boolean preserveIndicesUponCompletion() {
return true;
public final void test() throws Throwable {
try {
doTest();
} catch (AssertionError ae) {
throw reworkException(ae);
}
}
protected Throwable reworkException(Throwable th) {
/**
* Implementations should pay attention to using {@link #executeJdbcQuery(Connection, String)} (typically for ES connections)
* and {@link #assertResults(ResultSet, ResultSet)}, which takes logging/debugging of results into account (through {@link #logEsResultSet()}).
*
* @throws Throwable
*/
protected abstract void doTest() throws Throwable;
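// Illustrative only, not part of this change: a hypothetical subclass following the
// contract above would look roughly like this (the query and expectedResultSet() are
// made-up placeholders):
//
//     @Override
//     protected void doTest() throws Throwable {
//         try (Connection es = esJdbc()) {
//             ResultSet elastic = executeJdbcQuery(es, "SELECT * FROM some_index");
//             assertResults(expectedResultSet(), elastic);
//         }
//     }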
protected ResultSet executeJdbcQuery(Connection con, String query) throws SQLException {
Statement statement = con.createStatement();
//statement.setFetchSize(randomInt(10));
// NOCOMMIT: hook up pagination
statement.setFetchSize(1000);
return statement.executeQuery(query);
}
protected boolean logEsResultSet() {
return false;
}
protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException {
Logger log = logEsResultSet() ? logger : null;
JdbcAssert.assertResultSets(expected, elastic, log);
}
private Throwable reworkException(Throwable th) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);