Forbidden APIs in JDBC

Original commit: elastic/x-pack-elasticsearch@0c8c54911f
Nik Everett 2017-06-21 16:32:15 -04:00
parent 50aa97e3cf
commit aa784f9f46
26 changed files with 188 additions and 285 deletions

View File

@ -5,54 +5,23 @@
*/
package org.elasticsearch.xpack.sql.jdbc.net.protocol;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto.Action;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Properties;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto.Action;
import static org.elasticsearch.xpack.sql.net.client.util.StringUtils.EMPTY;
public class InfoRequest extends Request {
public final String jvmVersion, jvmVendor, jvmClassPath, osName, osVersion;
public InfoRequest(Properties props) {
public InfoRequest() {
super(Action.INFO);
jvmVersion = props.getProperty("java.version", EMPTY);
jvmVendor = props.getProperty("java.vendor", EMPTY);
jvmClassPath = props.getProperty("java.class.path", EMPTY);
osName = props.getProperty("os.name", EMPTY);
osVersion = props.getProperty("os.version", EMPTY);
}
public InfoRequest(String jvmVersion, String jvmVendor, String jvmClassPath, String osName, String osVersion) {
super(Action.INFO);
this.jvmVersion = jvmVersion;
this.jvmVendor = jvmVendor;
this.jvmClassPath = jvmClassPath;
this.osName = osName;
this.osVersion = osVersion;
}
@Override
public void encode(DataOutput out) throws IOException {
out.writeInt(action.value());
out.writeUTF(jvmVersion);
out.writeUTF(jvmVendor);
out.writeUTF(jvmClassPath);
out.writeUTF(osName);
out.writeUTF(osVersion);
}
public static InfoRequest decode(DataInput in) throws IOException {
String jvmVersion = in.readUTF();
String jvmVendor = in.readUTF();
String jvmClassPath = in.readUTF();
String osName = in.readUTF();
String osVersion = in.readUTF();
return new InfoRequest(jvmVersion, jvmVendor, jvmClassPath, osName, osVersion);
return new InfoRequest();
}
}
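
The request is still framed with plain writeUTF/readUTF calls on DataOutput/DataInput. A minimal, self-contained sketch of that symmetric round trip, using a hypothetical helper rather than InfoRequest itself:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical round-trip helper mirroring the encode/decode pair above:
// whatever encode writes with writeUTF, decode must read back with readUTF
// in the same order.
final class WireRoundTrip {
    static String roundTrip(String value) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeUTF(value);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            return in.readUTF();
        }
    }
}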

View File

@ -53,6 +53,12 @@ dependencyLicenses {
ignoreSha 'net-client'
}
forbiddenApisTest {
//we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage
bundledSignatures -= 'jdk-non-portable'
bundledSignatures += 'jdk-internal'
}
// TODO seems like we should use the jars....
jar {
from(zipTree(project(':x-pack-elasticsearch:sql-clients:net-client').jar.archivePath))
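
The bundledSignatures swap above is needed because the test fixtures stand up an HTTP server on the JDK's built-in com.sun.net.httpserver package, which the jdk-non-portable signature set rejects outright. A rough sketch of that kind of fixture (class and method names are hypothetical, not from the commit):

import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;

// Hypothetical stand-in for the kind of test fixture that forces the
// jdk-internal signature set: a throwaway HTTP server built on the JDK's own
// com.sun.net.httpserver package, bound to an ephemeral port.
final class TinyTestServer {
    static HttpServer startEphemeral() throws IOException {
        HttpServer server = HttpServer.create(new InetSocketAddress(0), 0);
        server.createContext("/", exchange -> {
            byte[] body = "ok".getBytes(StandardCharsets.UTF_8);
            exchange.sendResponseHeaders(200, body.length);
            try (OutputStream out = exchange.getResponseBody()) {
                out.write(body);
            }
        });
        server.start();
        return server; // callers stop it with server.stop(0)
    }
}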

View File

@ -5,10 +5,18 @@
*/
package org.elasticsearch.xpack.sql.jdbc.debug;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.util.IOUtils;
import org.elasticsearch.xpack.sql.net.client.SuppressForbidden;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.CallableStatement;
import java.sql.Connection;
@ -19,12 +27,9 @@ import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.util.IOUtils;
public final class Debug {
// cache for streams created by ourselves
@ -94,7 +99,7 @@ public final class Debug {
// System.out/err can be changed so do some checks
if ("err".equals(out)) {
PrintStream sys = System.err;
PrintStream sys = stderr();
if (SYS_ERR == null) {
SYS_ERR = sys;
@ -105,13 +110,13 @@ public final class Debug {
ERR = null;
}
if (ERR == null) {
ERR = new DebugLog(new PrintWriter(sys));
ERR = new DebugLog(new PrintWriter(new OutputStreamWriter(sys, StandardCharsets.UTF_8)));
}
return ERR;
}
if ("out".equals(out)) {
PrintStream sys = System.out;
PrintStream sys = stdout();
if (SYS_OUT == null) {
SYS_OUT = sys;
@ -124,7 +129,7 @@ public final class Debug {
}
if (OUT == null) {
OUT = new DebugLog(new PrintWriter(sys));
OUT = new DebugLog(new PrintWriter(new OutputStreamWriter(sys, StandardCharsets.UTF_8)));
}
return OUT;
}
@ -134,7 +139,7 @@ public final class Debug {
if (log == null) {
// must be local file
try {
PrintWriter print = new PrintWriter(Paths.get("").resolve(out).toFile(), "UTF-8");
PrintWriter print = new PrintWriter(Files.newBufferedWriter(Paths.get("").resolve(out), StandardCharsets.UTF_8));
log = new DebugLog(print);
OUTPUT_CACHE.put(out, log);
OUTPUT_REFS.put(out, Integer.valueOf(0));
@ -189,4 +194,14 @@ public final class Debug {
OUTPUT_MANAGED.clear();
}
// NOCOMMIT loggers instead, I think
@SuppressForbidden(reason="temporary")
private static PrintStream stdout() {
return System.out;
}
@SuppressForbidden(reason="temporary")
private static PrintStream stderr() {
return System.err;
}
}
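
The pattern applied above funnels the System.out/System.err reads through @SuppressForbidden helpers and names the charset whenever a PrintStream is wrapped in a writer. A sketch of that pattern that compiles on its own; the real annotation is the project's org.elasticsearch.xpack.sql.net.client.SuppressForbidden, so a stand-in is declared here:

import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.charset.StandardCharsets;

// Sketch of the forbidden-apis-friendly console access used above. In the
// commit the annotation is the project's own SuppressForbidden; a stand-in
// is declared here so the sketch is self-contained.
final class ConsoleSink {
    @Retention(RetentionPolicy.CLASS)
    @interface SuppressForbidden {
        String reason();
    }

    @SuppressForbidden(reason = "temporary console fallback")
    private static PrintStream stdout() {
        return System.out; // the single place the forbidden call is funneled through
    }

    static PrintWriter stdoutWriter() {
        // Name the charset explicitly; the bare PrintWriter(OutputStream)
        // constructor would pick up the platform default encoding, which the
        // default-charset checks flag.
        return new PrintWriter(new OutputStreamWriter(stdout(), StandardCharsets.UTF_8));
    }
}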

View File

@ -5,18 +5,17 @@
*/
package org.elasticsearch.xpack.sql.jdbc.debug;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import java.io.PrintWriter;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.Locale;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
//
// Logging is done through PrintWriter (not PrintStream which maps to System.err/out) to plug into the JDBC API
// For performance reasons the locale is not used since it forces a new Formatter to be created per message in a sync block
// and the locale does not affect the message printing
//
final class DebugLog {
// NOCOMMIT investigate using JDK's logging. It doesn't have any dependencies and should be plenty quick when not logging.
// NOCOMMIT there was a message about not using the Locale being faster but that violated forbidden APIs. Investigate further.
private static final String HEADER = "%tF/%tT.%tL - ";
@ -28,7 +27,7 @@ final class DebugLog {
void logMethod(Method m, Object[] args) {
long time = System.currentTimeMillis();
print.printf(HEADER + "Invoke %s#%s(%s)%n",
print.printf(Locale.ROOT, HEADER + "Invoke %s#%s(%s)%n",
time, time, time,
//m.getReturnType().getSimpleName(),
m.getDeclaringClass().getSimpleName(),
@ -40,7 +39,7 @@ final class DebugLog {
void logResult(Method m, Object[] args, Object r) {
long time = System.currentTimeMillis();
print.printf(HEADER + "%s#%s(%s) returned %s%n",
print.printf(Locale.ROOT, HEADER + "%s#%s(%s) returned %s%n",
time, time, time,
//m.getReturnType().getSimpleName(),
m.getDeclaringClass().getSimpleName(),
@ -52,7 +51,7 @@ final class DebugLog {
void logException(Method m, Object[] args, Throwable t) {
long time = System.currentTimeMillis();
print.printf(HEADER + "%s#%s(%s) threw ",
print.printf(Locale.ROOT, HEADER + "%s#%s(%s) threw ",
time, time, time,
m.getDeclaringClass().getSimpleName(),
m.getName(),
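
Passing Locale.ROOT to printf pins the rendering of the %t date/time conversions so the log header looks the same on every JVM; the locale-less printf overloads are what the forbidden-apis check flags. A small stand-alone illustration of the header formatting, outside DebugLog itself:

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Locale;

final class LocaleRootExample {
    // Builds the same style of header DebugLog prints; Locale.ROOT pins the
    // %t date/time conversions so the output does not depend on the JVM's
    // default locale.
    static String formatHeaderLine(String className, String method) {
        StringWriter buffer = new StringWriter();
        PrintWriter print = new PrintWriter(buffer);
        long time = System.currentTimeMillis();
        print.printf(Locale.ROOT, "%tF/%tT.%tL - Invoke %s#%s%n", time, time, time, className, method);
        print.flush();
        return buffer.toString();
    }
}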

View File

@ -9,27 +9,13 @@ import java.util.Locale;
import static java.lang.String.format;
@SuppressWarnings("serial")
public class JdbcException extends RuntimeException {
public JdbcException() {
super();
}
public JdbcException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
public JdbcException(String message, Object... args) {
// NOCOMMIT we very rarely use this on new classes in core, instead appending strings.
super(format(Locale.ROOT, message, args));
}
public JdbcException(Throwable cause, String message, Object... args) {
super(format(Locale.ROOT, message, args), cause);
}
public JdbcException(Throwable cause) {
super(cause);
}
}

View File

@ -63,7 +63,7 @@ class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
checkOpen();
if (parameterIndex < 0 || parameterIndex > query.paramCount()) {
throw new SQLException(String.format("Invalid parameter %s; needs to be between 1 and %s", parameterIndex, query.paramCount()));
throw new SQLException("Invalid parameter [ " + parameterIndex + "; needs to be between 1 and [" + query.paramCount() + "]");
}
query.setParam(parameterIndex, value, JDBCType.valueOf(type));

View File

@ -5,6 +5,9 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
@ -31,9 +34,6 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
import static java.lang.String.format;
class JdbcResultSet implements ResultSet, JdbcWrapper {
@ -66,7 +66,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
private Object column(int columnIndex) throws SQLException {
checkOpen();
if (columnIndex < 1 || columnIndex > cursor.columnSize()) {
throw new SQLException(String.format("Invalid column index %s", columnIndex));
throw new SQLException("Invalid column index [" + columnIndex + "]");
}
Object object = cursor.column(columnIndex - 1);
wasNull = (object == null);
@ -77,7 +77,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
checkOpen();
Integer index = nameToIndex.get(columnName);
if (index == null) {
throw new SQLException(String.format("Invalid column label %s", columnName));
throw new SQLException("Invalid column label [" + columnName + "]");
}
return index.intValue();
}
@ -304,7 +304,7 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
private <T> T convert(int columnIndex, Class<T> type) throws SQLException {
checkOpen();
if (columnIndex < 1 || columnIndex > cursor.columnSize()) {
throw new SQLException(String.format("Invalid column index %s", columnIndex));
throw new SQLException("Invalid column index [" + columnIndex + "]");
}
Object val = column(columnIndex);

View File

@ -149,7 +149,7 @@ class JdbcResultSetMetaData implements ResultSetMetaData, JdbcWrapper {
private final ColumnInfo column(int column) throws SQLException {
checkOpen();
if (column < 1 || column > columns.size()) {
throw new SQLException(String.format("Invalid column index %s", column));
throw new SQLException("Invalid column index [" + column + "]");
}
return columns.get(column - 1);
}

View File

@ -13,8 +13,30 @@ import java.sql.JDBCType;
import java.sql.Time;
import java.sql.Timestamp;
import static java.lang.String.format;
import static java.sql.Types.*;
import static java.sql.Types.BIGINT;
import static java.sql.Types.BINARY;
import static java.sql.Types.BIT;
import static java.sql.Types.BLOB;
import static java.sql.Types.BOOLEAN;
import static java.sql.Types.CHAR;
import static java.sql.Types.CLOB;
import static java.sql.Types.DATE;
import static java.sql.Types.DECIMAL;
import static java.sql.Types.DOUBLE;
import static java.sql.Types.FLOAT;
import static java.sql.Types.INTEGER;
import static java.sql.Types.LONGVARBINARY;
import static java.sql.Types.LONGVARCHAR;
import static java.sql.Types.NULL;
import static java.sql.Types.NUMERIC;
import static java.sql.Types.REAL;
import static java.sql.Types.SMALLINT;
import static java.sql.Types.TIME;
import static java.sql.Types.TIMESTAMP;
import static java.sql.Types.TIMESTAMP_WITH_TIMEZONE;
import static java.sql.Types.TINYINT;
import static java.sql.Types.VARBINARY;
import static java.sql.Types.VARCHAR;
public abstract class JdbcUtils {
@ -99,7 +121,7 @@ public abstract class JdbcUtils {
case TIMESTAMP_WITH_TIMEZONE:
return Long.class;
default:
throw new IllegalArgumentException(format("Unsupported JDBC type %d/%s", jdbcType, nameOf(jdbcType)));
throw new IllegalArgumentException("Unsupported JDBC type [" + jdbcType + "/" + nameOf(jdbcType) + "]");
}
}

View File

@ -27,7 +27,7 @@ import static java.util.Calendar.*;
abstract class TypeConverter {
static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
private static final long DAY_IN_MILLIS = 60 * 60 * 24;

View File

@ -134,7 +134,7 @@ public class HttpJdbcClient implements Closeable {
}
private InfoResponse fetchServerInfo() throws SQLException {
BytesArray ba = http.put(out -> ProtoUtils.write(out, new InfoRequest(System.getProperties())));
BytesArray ba = http.put(out -> ProtoUtils.write(out, new InfoRequest()));
return doIO(ba, in -> readResponse(in, Action.INFO));
}

View File

@ -131,12 +131,7 @@ public class BytesArray {
}
int newcount = size + len;
checkSize(newcount);
try {
System.arraycopy(b, off, bytes, size, len);
} catch (ArrayIndexOutOfBoundsException ex) {
System.err.println(String.format("Copying array of size %d, content %s, off %d, len %d to bytes with len %d at offset %d", b.length, new BytesArray(b), off, len, bytes.length, size));
throw ex;
}
System.arraycopy(b, off, bytes, size, len);
size = newcount;
}

View File

@ -1,65 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.util;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
public abstract class ReflectionUtils {
public static Field findField(Class<?> clazz, String name) {
return findField(clazz, name, null);
}
public static Field findField(Class<?> clazz, String name, Class<?> type) {
Assert.notNull(clazz, "Class must not be null");
Assert.isTrue(name != null || type != null, "Either name or type of the field must be specified");
Class<?> searchType = clazz;
while (!Object.class.equals(searchType) && searchType != null) {
Field[] fields = searchType.getDeclaredFields();
for (Field field : fields) {
if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) {
return field;
}
}
searchType = searchType.getSuperclass();
}
return null;
}
public static void makeAccessible(AccessibleObject accessible) {
if (!accessible.isAccessible()) {
accessible.setAccessible(true);
}
}
@SuppressWarnings("unchecked")
public static <T> T getField(Field field, Object target) {
try {
return (T) field.get(target);
} catch (IllegalAccessException ex) {
throw new JdbcException("Unexpected reflection exception - %s: %s", ex.getClass().getName(), ex.getMessage());
}
}
public static Method findMethod(Class<?> targetClass, String name, Class<?>... paramTypes) {
while (targetClass != null) {
Method[] methods = (targetClass.isInterface() ? targetClass.getMethods() : targetClass.getDeclaredMethods());
for (Method method : methods) {
if (name.equals(method.getName())
&& (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) {
return method;
}
}
targetClass = targetClass.getSuperclass();
}
return null;
}
}

View File

@ -5,46 +5,39 @@
*/
package org.elasticsearch.sql.jdbc;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.junit.Test;
import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.is;
public class ConnectionInfoTest {
public class ConnectionInfoTests extends ESTestCase {
private JdbcConfiguration ci(String url) {
return new JdbcConfiguration(url, null);
}
@Test(expected = JdbcException.class)
public void testJustThePrefix() throws Exception {
ci("jdbc:es:");
Exception e = expectThrows(JdbcException.class, () -> ci("jdbc:es:"));
assertEquals("Invalid URL jdbc:es:, format should be jdbc:es://[host[:port]]*/[prefix]*[?[option=value]&]*", e.getMessage());
}
@Test
public void testJustTheHost() throws Exception {
assertThat(ci("jdbc:es://localhost").asUrl().toString(), is("http://localhost:9200/"));
}
@Test
public void testHostAndPort() throws Exception {
assertThat(ci("jdbc:es://localhost:1234").asUrl().toString(), is("http://localhost:1234/"));
}
@Test
public void testTrailingSlashForHost() throws Exception {
assertThat(ci("jdbc:es://localhost:1234/").asUrl().toString(), is("http://localhost:1234/"));
}
@Test
public void testMultiPathSuffix() throws Exception {
assertThat(ci("jdbc:es://a:1/foo/bar/tar").asUrl().toString(), is("http://a:1/foo/bar/tar"));
}
@Test
public void testDebug() throws Exception {
JdbcConfiguration ci = ci("jdbc:es://a:1/?debug=true");
assertThat(ci.asUrl().toString(), is("http://a:1/"));
@ -52,7 +45,6 @@ public class ConnectionInfoTest {
assertThat(ci.debugOut(), is("err"));
}
@Test
public void testDebugOut() throws Exception {
JdbcConfiguration ci = ci("jdbc:es://a:1/?debug=true&debug.output=jdbc.out");
assertThat(ci.asUrl().toString(), is("http://a:1/"));
@ -60,12 +52,11 @@ public class ConnectionInfoTest {
assertThat(ci.debugOut(), is("jdbc.out"));
}
@Test(expected = JdbcException.class)
public void testTypeInParam() throws Exception {
ci("jdbc:es://a:1/foo/bar/tar?debug=true&debug.out=jdbc.out");
Exception e = expectThrows(JdbcException.class, () -> ci("jdbc:es://a:1/foo/bar/tar?debug=true&debug.out=jdbc.out"));
assertEquals("Unknown parameter [debug.out] ; did you mean [debug.output]", e.getMessage());
}
@Test
public void testDebugOutWithSuffix() throws Exception {
JdbcConfiguration ci = ci("jdbc:es://a:1/foo/bar/tar?debug=true&debug.output=jdbc.out");
assertThat(ci.asUrl().toString(), is("http://a:1/foo/bar/tar"));
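
The conversion from JUnit's @Test(expected = ...) to ESTestCase's expectThrows is what makes the message assertions above possible: the thrown exception comes back as a value. A self-contained sketch of the pattern with an illustrative exception and message (not the ones from JdbcConfiguration):

import org.elasticsearch.test.ESTestCase;

// Sketch of the expectThrows conversion: instead of @Test(expected = ...),
// the exception object is returned so its message can be asserted as well.
// The check and message here are made up for illustration.
public class ExpectThrowsExampleTests extends ESTestCase {
    public void testRejectsNegativeSize() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> checkSize(-1));
        assertEquals("size must be positive but was [-1]", e.getMessage());
    }

    private static void checkSize(int size) {
        if (size <= 0) {
            throw new IllegalArgumentException("size must be positive but was [" + size + "]");
        }
    }
}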

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.jdbc.integration.net.protocol;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.elasticsearch.xpack.sql.jdbc.integration.server.JdbcHttpServer;
import org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcTemplate;
@ -18,7 +19,6 @@ import org.elasticsearch.xpack.sql.jdbc.net.protocol.InfoResponse;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.MetaColumnInfo;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.net.InetAddress;
import java.sql.Connection;
@ -37,10 +37,10 @@ import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertThat;
public class ProtoTest {
public class ProtoTests extends ESTestCase {
// NOCOMMIT investigate switching to ESRestTestCase and making an integration test.
private static Client esClient;
private static JdbcHttpServer server;
@ -50,7 +50,7 @@ public class ProtoTest {
private static JdbcTemplate j;
@BeforeClass
public static void setUp() throws Exception {
public static void setUpServer() throws Exception {
if (esClient == null) {
esClient = new PreBuiltTransportClient(Settings.EMPTY)
.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), 9300));
@ -58,7 +58,6 @@ public class ProtoTest {
if (server == null) {
server = new JdbcHttpServer(esClient);
server.start(0);
System.out.println("Server started at " + server.address().getPort());
}
if (client == null) {
@ -71,11 +70,11 @@ public class ProtoTest {
driver = new JdbcDriver();
}
j = new JdbcTemplate(ProtoTest::con);
j = new JdbcTemplate(ProtoTests::con);
}
@AfterClass
public static void tearDown() {
public static void tearDownServer() {
if (server != null) {
server.stop();
server = null;
@ -101,12 +100,10 @@ public class ProtoTest {
return driver.connect(jdbcUrl, new Properties());
}
@Test
public void test01Ping() throws Exception {
assertThat(client.ping((int) TimeUnit.SECONDS.toMillis(5)), equalTo(true));
}
@Test
public void testInfoAction() throws Exception {
InfoResponse esInfo = client.serverInfo();
assertThat(esInfo, notNullValue());
@ -118,22 +115,20 @@ public class ProtoTest {
//assertThat(esInfo.minorVersion(), is(0));
}
@Test
public void testInfoTable() throws Exception {
List<String> tables = client.metaInfoTables("emp*");
assertThat(tables.size(), greaterThanOrEqualTo(1));
assertThat(tables, hasItem("emp.emp"));
}
@Test
public void testInfoColumn() throws Exception {
List<MetaColumnInfo> info = client.metaInfoColumns("em*", null);
for (MetaColumnInfo i : info) {
System.out.println(i);
// NOCOMMIT test these
logger.info(i);
}
}
@Test
public void testBasicJdbc() throws Exception {
j.consume(c -> {
assertThat(c.isClosed(), is(false));
@ -143,7 +138,6 @@ public class ProtoTest {
j.queryToConsole("SHOW TABLES");
}
@Test
public void testBasicSelect() throws Exception {
j.consume(c -> {
assertThat(c.isClosed(), is(false));
@ -153,18 +147,17 @@ public class ProtoTest {
j.queryToConsole("SELECT * from \"emp.emp\" ");
}
@Test(expected = RuntimeException.class)
public void testBasicDemo() throws Exception {
j.consume(c -> {
assertThat(c.isClosed(), is(false));
assertThat(c.isReadOnly(), is(true));
});
j.queryToConsole("SELECT name, postalcode, last_score, last_score_date FROM doesnot.exist");
RuntimeException e = expectThrows(RuntimeException.class, () ->
j.queryToConsole("SELECT name, postalcode, last_score, last_score_date FROM doesnot.exist"));
assertEquals("asdfasd", e.getMessage());
}
@Test
public void testMetadataGetProcedures() throws Exception {
j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
@ -175,7 +168,6 @@ public class ProtoTest {
});
}
@Test
public void testMetadataGetProcedureColumns() throws Exception {
j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
@ -186,7 +178,6 @@ public class ProtoTest {
});
}
@Test
public void testMetadataGetTables() throws Exception {
j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
@ -197,14 +188,14 @@ public class ProtoTest {
});
}
@Test(expected = RuntimeException.class)
public void testMetadataColumns() throws Exception {
j.consume(c -> {
RuntimeException e = expectThrows(RuntimeException.class, () -> j.consume(c -> {
DatabaseMetaData metaData = c.getMetaData();
ResultSet results = metaData.getColumns("elasticsearch", "", "dep.dep", "%");
assertThat(results, is(notNullValue()));
assertThat(results.next(), is(true));
assertThat(results.getMetaData().getColumnCount(), is(24));
});
}));
assertEquals("adsf", e.getMessage());
}
}

View File

@ -5,9 +5,14 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;
import org.junit.Test;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.ArrayList;
@ -16,28 +21,24 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.elasticsearch.common.Strings;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameter;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.integration.util.JdbcAssert.assertResultSets;
public abstract class CompareToH2BaseTest {
public abstract class CompareToH2BaseTestCase extends ESTestCase {
public final String queryName;
public final String query;
public final Integer lineNumber;
public final Path source;
@Parameter(0)
public String queryName;
@Parameter(1)
public String query;
@Parameter(2)
public Integer lineNumber;
@Parameter(3)
public Path source;
public CompareToH2BaseTestCase(String queryName, String query, Integer lineNumber, Path source) {
this.queryName = queryName;
this.query = query;
this.lineNumber = lineNumber;
this.source = source;
}
protected static List<Object[]> readScriptSpec(String url) throws Exception {
Path source = Paths.get(CompareToH2BaseTest.class.getResource(url).toURI());
Path source = PathUtils.get(CompareToH2BaseTestCase.class.getResource(url).toURI());
List<String> lines = Files.readAllLines(source);
Map<String, Integer> testNames = new LinkedHashMap<>();
@ -73,9 +74,7 @@ public abstract class CompareToH2BaseTest {
return pairs;
}
@Test
public void testQuery() throws Throwable {
// H2 resultset
try (Connection h2 = QuerySuite.h2Con().get();
Connection es = QuerySuite.esCon().get()) {
ResultSet expected, actual;

View File

@ -5,14 +5,13 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.nio.file.Path;
@RunWith(Parameterized.class)
public class DebugSpecTest extends CompareToH2BaseTest {
public class DebugSpecTests extends CompareToH2BaseTestCase {
public DebugSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/debug.spec");
}

View File

@ -8,10 +8,10 @@ package org.elasticsearch.xpack.sql.jdbc.integration.query;
import java.sql.Connection;
import java.util.function.Supplier;
import org.elasticsearch.xpack.sql.jdbc.integration.query.filter.FilterSpecTest;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.aggregate.AggSpecTest;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.datetime.DateTimeSpecTest;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.math.MathSpecTest;
import org.elasticsearch.xpack.sql.jdbc.integration.query.filter.FilterSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.aggregate.AggSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.datetime.DateTimeSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.math.MathSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsDataLoader;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsJdbcServer;
import org.elasticsearch.xpack.sql.jdbc.integration.util.H2;
@ -25,12 +25,13 @@ import org.junit.runners.Suite.SuiteClasses;
import static org.junit.Assert.assertNotNull;
@RunWith(Suite.class)
@SuiteClasses({ SelectSpecTest.class, FilterSpecTest.class, AggSpecTest.class, MathSpecTest.class, DateTimeSpecTest.class })
@SuiteClasses({ SelectSpecTests.class, FilterSpecTests.class, AggSpecTests.class, MathSpecTests.class, DateTimeSpecTests.class })
//@SuiteClasses({ DebugSpecTest.class })
//@SuiteClasses({ AggSpecTest.class })
//@SuiteClasses({ DateTimeSpecTest.class })
//@SuiteClasses({ MathSpecTest.class })
public class QuerySuite {
// NOCOMMIT we don't have suites in core so this is hard to figure out
//
// REMOTE ACCESS

View File

@ -5,15 +5,13 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.nio.file.Path;
@RunWith(Parameterized.class)
public class SelectSpecTest extends CompareToH2BaseTest {
public class SelectSpecTests extends CompareToH2BaseTestCase {
public SelectSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/select/select.spec");
}

View File

@ -5,15 +5,15 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.filter;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
@RunWith(Parameterized.class)
public class FilterSpecTest extends CompareToH2BaseTest {
import java.nio.file.Path;
public class FilterSpecTests extends CompareToH2BaseTestCase {
public FilterSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/filter/filter.spec");
}

View File

@ -5,15 +5,18 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.aggregate;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
@RunWith(Parameterized.class)
public class AggSpecTest extends CompareToH2BaseTest {
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
@Parameters(name = "test{0}")
import java.nio.file.Path;
public class AggSpecTests extends CompareToH2BaseTestCase {
public AggSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/aggregate/agg.spec");
}

View File

@ -5,15 +5,15 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.datetime;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
@RunWith(Parameterized.class)
public class DateTimeSpecTest extends CompareToH2BaseTest {
import java.nio.file.Path;
public class DateTimeSpecTests extends CompareToH2BaseTestCase {
public DateTimeSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/scalar/datetime/datetime.spec");
}

View File

@ -5,15 +5,15 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.math;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
@RunWith(Parameterized.class)
public class MathSpecTest extends CompareToH2BaseTest {
import java.nio.file.Path;
public class MathSpecTests extends CompareToH2BaseTestCase {
public MathSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/scalar/math/math.spec");
}

View File

@ -5,15 +5,15 @@
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.select;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTest;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
@RunWith(Parameterized.class)
public class SelectSpecTest extends CompareToH2BaseTest {
import java.nio.file.Path;
public class SelectSpecTests extends CompareToH2BaseTestCase {
public SelectSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@Parameters(name = "test{0}")
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/select/select.spec");
}

View File

@ -11,6 +11,7 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
@ -18,25 +19,21 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.Random;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.ESTestCase.between;
import static org.junit.Assert.fail;
// used rarely just to load the data (hence why it's marked as abstract)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
// NOCOMMIT we should set this up to run with the tests if we need it
public abstract class EsDataLoader {
private static final Logger log = ESLoggerFactory.getLogger(EsDataLoader.class.getName());
@ -261,9 +258,9 @@ public abstract class EsDataLoader {
log.info("About to parse and load the employee to department nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> emp = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
@ -358,9 +355,9 @@ public abstract class EsDataLoader {
log.info("About to parse and load the department to employee nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
@ -461,9 +458,9 @@ public abstract class EsDataLoader {
log.info("About to parse and load the department to employee nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(Paths.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
@ -471,7 +468,6 @@ public abstract class EsDataLoader {
BulkRequestBuilder brb = client().prepareBulk();
Random rnd = new Random();
employees.forEach(emp -> {
try {
String[] eSplit = emp.split(",");
@ -485,8 +481,7 @@ public abstract class EsDataLoader {
for (int i = 0; i < eSplit.length; i++) {
sourceBuilder.field(empCol[i], eSplit[i]);
}
// salary (random between 38000 and 106000)
sourceBuilder.field("salary", rnd.nextInt(106000 - 38000 + 1) + 38000);
sourceBuilder.field("salary", between(38000, 106000));
sourceBuilder.startArray("dep");
@ -549,7 +544,7 @@ public abstract class EsDataLoader {
BulkRequestBuilder brb = client().prepareBulk();
try (Stream<String> stream = Files.lines(Paths.get(dataSet.toURI()))) {
try (Stream<String> stream = Files.lines(PathUtils.get(dataSet.toURI()))) {
stream.forEach(s -> {
try {
XContentBuilder sourceBuilder = jsonBuilder().startObject();
@ -573,8 +568,4 @@ public abstract class EsDataLoader {
log.info("Dataset loaded in {}", br.getTook().format());
}
@Test
public void testNoOp() {}
}
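
Resource paths are now resolved through org.elasticsearch.common.io.PathUtils rather than java.nio.file.Paths, which the Elasticsearch forbidden-apis signatures flag so code is not hard-wired to the JVM-wide default filesystem. A small sketch of the resulting resource-loading pattern (the helper class is hypothetical):

import org.elasticsearch.common.io.PathUtils;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

// Hypothetical helper showing the PathUtils-based resource loading the data
// loader switches to: the URI from getResource is resolved via PathUtils
// instead of Paths.get.
final class CsvResource {
    static List<String> readLines(Class<?> owner, String resource) throws IOException, URISyntaxException {
        Path path = PathUtils.get(owner.getResource(resource).toURI());
        return Files.readAllLines(path, StandardCharsets.UTF_8);
    }
}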

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.jdbc.integration.util;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.xpack.sql.net.client.SuppressForbidden;
import java.sql.Connection;
import java.sql.PreparedStatement;
@ -45,7 +46,9 @@ public class JdbcTemplate {
private static final int MAX_WIDTH = 20;
@SuppressForbidden(reason="temporary")
public static CheckedFunction<ResultSet, Void, SQLException> resultSetToConsole() {
// NOCOMMIT this doesn't really test anything. If we want to log the whole result set we can do that too, but we have to add assertions
return rs -> {
ResultSetMetaData metaData = rs.getMetaData();
StringBuilder sb = new StringBuilder();