Make JDBC driver throw only SQLException (elastic/x-pack-elasticsearch#2708)

A JDBC driver should throw only checked SQLExceptions.
Introduce JdbcSQLException and fix some NOCOMMITs along the way.

Original commit: elastic/x-pack-elasticsearch@299fcf9ace
Costin Leau 2017-10-12 12:27:19 +03:00 committed by GitHub
parent 7f0c44f138
commit e3d072aeea
26 changed files with 137 additions and 95 deletions
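For illustration only (not part of the commit): the practical effect of this change is that driver errors now reach callers as checked java.sql.SQLException instances (including the new JdbcSQLException subclass) instead of the unchecked JdbcException, so they flow through the standard JDBC error path. A minimal usage sketch follows; the host, port, and query are placeholder assumptions, while the URL format and class names come from the diff below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class JdbcSQLExceptionDemo {
    public static void main(String[] args) {
        // Placeholder host/port; URL format taken from JdbcConfiguration in the diff:
        // jdbc:es://[host[:port]]*/[prefix]*[?[option=value]&]*
        String url = "jdbc:es://localhost:9200/";
        try (Connection con = DriverManager.getConnection(url);
             Statement st = con.createStatement();
             ResultSet rs = st.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println(rs.getObject(1));
            }
        } catch (SQLException e) {
            // After this commit, malformed URLs, bad parameters, transport failures, etc.
            // arrive here as checked SQLExceptions (e.g. JdbcSQLException) rather than as
            // unchecked JdbcException instances that bypass JDBC error handling.
            System.err.println("SQL error: " + e.getMessage());
        }
    }
}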

View File

@ -15,9 +15,7 @@ import java.sql.JDBCType;
public class ProtoUtils {
// See Jdbc spec, appendix B
@SuppressWarnings("unchecked")
public static <T> T readValue(DataInput in, JDBCType type) throws IOException {
// NOCOMMIT <T> feels slippery here
public static Object readValue(DataInput in, JDBCType type) throws IOException {
Object result;
byte hasNext = in.readByte();
if (hasNext == 0) { // TODO feels like a bitmask at the start of the row would be better.
@ -74,7 +72,7 @@ public class ProtoUtils {
default:
throw new IOException("Don't know how to read type [" + type + "]");
}
return (T) result;
return result;
}
public static void writeValue(DataOutput out, Object o, JDBCType type) throws IOException {

View File

@ -37,7 +37,7 @@ public class QueryInitResponse extends AbstractQueryResponse {
columns.add(new ColumnInfo(in));
}
this.columns = unmodifiableList(columns);
// NOCOMMIT - Page is a client class, it shouldn't leak here
// TODO - Page is a client class, it shouldn't leak here
Page data = new Page(columns);
data.readFrom(in);
this.data = data;

View File

@ -3,9 +3,10 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
package org.elasticsearch.xpack.sql.jdbc;
public class JdbcException extends RuntimeException {
public JdbcException(String message) {
super(message);
}

View File

@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc;
import java.sql.SQLException;
public class JdbcSQLException extends SQLException {
public JdbcSQLException(String message) {
super(message);
}
public JdbcSQLException(Throwable cause, String message) {
super(message, cause);
}
}

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.sql.jdbc.debug;
import org.elasticsearch.xpack.sql.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.net.client.SuppressForbidden;
import java.io.OutputStreamWriter;

View File

@ -14,9 +14,6 @@ import java.util.Locale;
// Logging is done through PrintWriter (not PrintStream which maps to System.err/out) to plug into the JDBC API
final class DebugLog {
// NOCOMMIT investigate using JDK's logging. It doesn't have any dependencies and should be plenty quick when not logging.
// NOCOMMIT there was a message about not using the Locale being faster but that violated forbidden APIs. Investigate further.
private static final String HEADER = "%tF/%tT.%tL - ";
final PrintWriter print;

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.sql.jdbc.debug;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
abstract class DebuggingInvoker implements InvocationHandler {
private final Object target;
@ -60,7 +60,7 @@ abstract class DebuggingInvoker implements InvocationHandler {
} catch (Exception ex) {
// should not occur
log.logException(method, args, ex);
throw new JdbcException(ex, "Debugging failed for [" + method + "]");
throw new JdbcSQLException(ex, "Debugging failed for [" + method + "]");
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.net.client.ConnectionConfiguration;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
@ -57,7 +58,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
private String debugOut = DEBUG_OUTPUT_DEFAULT;
private final TimeZone timeZone;
public JdbcConfiguration(String u, Properties props) {
public JdbcConfiguration(String u, Properties props) throws JdbcSQLException {
super(props);
originalUrl = u;
parseUrl(u);
@ -68,11 +69,11 @@ public class JdbcConfiguration extends ConnectionConfiguration {
timeZone = TimeZone.getTimeZone(settings().getProperty(TIME_ZONE, TIME_ZONE_DEFAULT));
}
private void parseUrl(String u) {
private void parseUrl(String u) throws JdbcSQLException {
String url = u;
String format = "jdbc:es://[host[:port]]*/[prefix]*[?[option=value]&]*";
if (!canAccept(u)) {
throw new JdbcException("Expected [" + URL_PREFIX + "] url, received [" + u +"]");
throw new JdbcSQLException("Expected [" + URL_PREFIX + "] url, received [" + u +"]");
}
try {
@ -89,7 +90,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
u = u.substring(URL_PREFIX.length(), u.length());
if (!u.startsWith("//")) {
throw new JdbcException("Invalid URL [" + url + "], format should be [" + format + "]");
throw new JdbcSQLException("Invalid URL [" + url + "], format should be [" + format + "]");
}
// remove //
@ -105,7 +106,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
int pIndex = u.indexOf("?");
if (pIndex > 0) {
if (index < 0) {
throw new JdbcException("Invalid URL [" + url + "], format should be [" + format + "]");
throw new JdbcSQLException("Invalid URL [" + url + "], format should be [" + format + "]");
}
if (pIndex + 1 < u.length()) {
params = u.substring(pIndex + 1);
@ -135,7 +136,7 @@ public class JdbcConfiguration extends ConnectionConfiguration {
index = hostAndPort.lastIndexOf(":");
if (index > 0) {
if (index + 1 >= hostAndPort.length()) {
throw new JdbcException("Invalid port specified");
throw new JdbcSQLException("Invalid port specified");
}
String host = hostAndPort.substring(0, index);
String port = hostAndPort.substring(index + 1);
@ -154,18 +155,18 @@ public class JdbcConfiguration extends ConnectionConfiguration {
for (String param : prms) {
List<String> args = StringUtils.tokenize(param, "=");
if (args.size() != 2) {
throw new JdbcException("Invalid parameter [" + param + "], format needs to be key=value");
throw new JdbcSQLException("Invalid parameter [" + param + "], format needs to be key=value");
}
String pName = args.get(0);
if (!KNOWN_OPTIONS.contains(pName)) {
throw new JdbcException("Unknown parameter [" + pName + "] ; did you mean " +
throw new JdbcSQLException("Unknown parameter [" + pName + "] ; did you mean " +
StringUtils.findSimiliar(pName, KNOWN_OPTIONS));
}
settings().setProperty(args.get(0), args.get(1));
}
}
} catch (JdbcException e) {
} catch (JdbcSQLException e) {
throw e;
} catch (Exception e) {
// Add the url to unexpected exceptions
@ -173,12 +174,12 @@ public class JdbcConfiguration extends ConnectionConfiguration {
}
}
public URL asUrl() {
public URL asUrl() throws JdbcSQLException {
// TODO: need to assemble all the various params here
try {
return new URL(isSSLEnabled() ? "https" : "http", hostAndPort.ip, port(), urlFile);
} catch (MalformedURLException ex) {
throw new JdbcException(ex, "Cannot connect to server [" + originalUrl + "]");
throw new JdbcSQLException(ex, "Cannot connect to server [" + originalUrl + "]");
}
}

View File

@ -5,9 +5,14 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.debug.Debug;
import org.elasticsearch.xpack.sql.jdbc.net.client.JdbcHttpClient;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.ClientInfoStatus;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
@ -27,9 +32,7 @@ import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import org.elasticsearch.xpack.sql.jdbc.debug.Debug;
import org.elasticsearch.xpack.sql.jdbc.net.client.JdbcHttpClient;
import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import static java.util.Collections.singletonMap;
/**
* Implementation of {@link Connection} for Elasticsearch.
@ -45,7 +48,7 @@ public class JdbcConnection implements Connection, JdbcWrapper {
private String schema;
private Properties clientInfo = new Properties();
public JdbcConnection(JdbcConfiguration connectionInfo) {
public JdbcConnection(JdbcConfiguration connectionInfo) throws SQLException {
cfg = connectionInfo;
client = new JdbcHttpClient(connectionInfo);
@ -335,7 +338,7 @@ public class JdbcConnection implements Connection, JdbcWrapper {
public void setClientInfo(String name, String value) throws SQLClientInfoException {
checkOpenClientInfo();
if (!StringUtils.hasText(name)) {
throw new JdbcException("Invalid/Empty name given");
throw new SQLClientInfoException(singletonMap(name, ClientInfoStatus.REASON_VALUE_INVALID));
}
if (value != null) {
clientInfo.put(name, value);

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.jdbc.net.client.Cursor;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ColumnInfo;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.MetaColumnInfo;
@ -1184,7 +1185,7 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper {
return false;
}
private static List<ColumnInfo> columnInfo(String tableName, Object... cols) {
private static List<ColumnInfo> columnInfo(String tableName, Object... cols) throws JdbcSQLException {
List<ColumnInfo> columns = new ArrayList<>();
for (int i = 0; i < cols.length; i++) {
@ -1203,13 +1204,13 @@ class JdbcDatabaseMetaData implements DatabaseMetaData, JdbcWrapper {
columns.add(new ColumnInfo(name, type, tableName, "INFORMATION_SCHEMA", EMPTY, EMPTY, 0));
}
else {
throw new JdbcException("Invalid metadata schema definition");
throw new JdbcSQLException("Invalid metadata schema definition");
}
}
return columns;
}
private static ResultSet emptySet(JdbcConfiguration cfg, String tableName, Object... cols) {
private static ResultSet emptySet(JdbcConfiguration cfg, String tableName, Object... cols) throws JdbcSQLException {
return new JdbcResultSet(cfg, null, new InMemoryCursor(columnInfo(tableName, cols), null));
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.jdbc.debug.Debug;
import org.elasticsearch.xpack.sql.jdbc.util.Version;
@ -24,31 +25,34 @@ public class JdbcDriver implements java.sql.Driver, Closeable {
private static final JdbcDriver INSTANCE = new JdbcDriver();
static {
register();
}
public static JdbcDriver register() {
try {
DriverManager.registerDriver(INSTANCE, INSTANCE::close);
register();
} catch (SQLException ex) {
// the SQLException is bogus as there's no source for it
PrintWriter writer = DriverManager.getLogWriter();
if (writer != null) {
ex.printStackTrace(writer);
}
throw new ExceptionInInitializerError(ex);
}
}
public static JdbcDriver register() throws SQLException {
DriverManager.registerDriver(INSTANCE, INSTANCE::close);
return INSTANCE;
}
public static void deregister() {
public static void deregister() throws SQLException {
try {
DriverManager.deregisterDriver(INSTANCE);
} catch (SQLException ex) {
// the SQLException is bogus as there's no source for it
// but we handle it just in case
PrintWriter writer = DriverManager.getLogWriter();
if (writer != null) {
ex.printStackTrace(writer);
}
throw ex;
}
}
@ -64,6 +68,9 @@ public class JdbcDriver implements java.sql.Driver, Closeable {
// Jdbc 4.0
//
public Connection connect(String url, Properties props) throws SQLException {
if (url == null) {
throw new JdbcSQLException("Non-null url required");
}
if (!acceptsURL(url)) {
return null;
}
@ -73,7 +80,7 @@ public class JdbcDriver implements java.sql.Driver, Closeable {
return cfg.debug() ? Debug.proxy(cfg, con, DriverManager.getLogWriter()) : con;
}
private static JdbcConfiguration initCfg(String url, Properties props) {
private static JdbcConfiguration initCfg(String url, Properties props) throws JdbcSQLException {
JdbcConfiguration ci = new JdbcConfiguration(url, props);
// if there's a timeout set on the DriverManager, make sure to use it

View File

@ -32,7 +32,7 @@ import java.util.Calendar;
class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement {
final PreparedQuery query;
JdbcPreparedStatement(JdbcConnection con, JdbcConfiguration info, String sql) {
JdbcPreparedStatement(JdbcConnection con, JdbcConfiguration info, String sql) throws SQLException {
super(con, info);
this.query = PreparedQuery.prepare(sql);
}

View File

@ -73,7 +73,12 @@ class JdbcResultSet implements ResultSet, JdbcWrapper {
if (columnIndex < 1 || columnIndex > cursor.columnSize()) {
throw new SQLException("Invalid column index [" + columnIndex + "]");
}
Object object = cursor.column(columnIndex - 1);
Object object = null;
try {
object = cursor.column(columnIndex - 1);
} catch (IllegalArgumentException iae) {
throw new SQLException(iae.getMessage());
}
wasNull = (object == null);
return object;
}

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import java.math.BigDecimal;
import java.sql.Blob;
import java.sql.Clob;
@ -72,7 +74,7 @@ public abstract class JdbcUtils {
return wrapperClass;
}
public static int fromClass(Class<?> clazz) {
public static int fromClass(Class<?> clazz) throws JdbcSQLException {
if (clazz == null) {
return NULL;
}
@ -125,12 +127,12 @@ public abstract class JdbcUtils {
return DECIMAL;
}
throw new JdbcException("Unrecognized class [" + clazz + "]");
throw new JdbcSQLException("Unrecognized class [" + clazz + "]");
}
// see javax.sql.rowset.RowSetMetaDataImpl
// and https://db.apache.org/derby/docs/10.5/ref/rrefjdbc20377.html
public static Class<?> classOf(int jdbcType) {
public static Class<?> classOf(int jdbcType) throws JdbcSQLException {
switch (jdbcType) {
case NUMERIC:
@ -173,7 +175,7 @@ public abstract class JdbcUtils {
case TIMESTAMP_WITH_TIMEZONE:
return Long.class;
default:
throw new JdbcException("Unsupported JDBC type " + jdbcType + ", " + type(jdbcType).getName() + "");
throw new JdbcSQLException("Unsupported JDBC type " + jdbcType + ", " + type(jdbcType).getName() + "");
}
}

View File

@ -5,7 +5,11 @@
*/
package org.elasticsearch.xpack.sql.jdbc.jdbc;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import java.sql.JDBCType;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.ArrayList;
import java.util.List;
@ -30,16 +34,16 @@ class PreparedQuery {
clearParams();
}
ParamInfo getParam(int param) {
ParamInfo getParam(int param) throws JdbcSQLException {
if (param < 1 || param > params.length) {
throw new JdbcException("Invalid parameter index [" + param + "]");
throw new JdbcSQLException("Invalid parameter index [" + param + "]");
}
return params[param - 1];
}
void setParam(int param, Object value, JDBCType type) {
void setParam(int param, Object value, JDBCType type) throws JdbcSQLException {
if (param < 1 || param > params.length) {
throw new JdbcException("Invalid parameter index [" + param + "]");
throw new JdbcSQLException("Invalid parameter index [" + param + "]");
}
params[param - 1].value = value;
params[param - 1].type = type;
@ -74,7 +78,7 @@ class PreparedQuery {
// Find the ? parameters for binding
// Additionally, throw away all JDBC escaping
static PreparedQuery prepare(String sql) {
static PreparedQuery prepare(String sql) throws SQLException {
int l = sql.length();
List<String> fragments = new ArrayList<>();
@ -128,8 +132,8 @@ class PreparedQuery {
return new PreparedQuery(fragments);
}
private static void jdbcEscape() {
throw new JdbcException("JDBC escaping not supported yet");
private static void jdbcEscape() throws SQLException {
throw new SQLFeatureNotSupportedException("JDBC escaping not supported yet");
}
@ -146,7 +150,7 @@ class PreparedQuery {
return i;
}
private static int multiLineComment(int i, String sql, StringBuilder current) {
private static int multiLineComment(int i, String sql, StringBuilder current) throws JdbcSQLException {
int block = 1;
for (; i < sql.length() - 1; i++) {
@ -168,10 +172,10 @@ class PreparedQuery {
return i;
}
}
throw new JdbcException("Cannot parse given sql; unclosed /* comment");
throw new JdbcSQLException("Cannot parse given sql; unclosed /* comment");
}
private static int string(int i, String sql, StringBuilder current, char q) {
private static int string(int i, String sql, StringBuilder current, char q) throws JdbcSQLException {
current.append(sql.charAt(i++));
for (; i < sql.length(); i++) {
char c = sql.charAt(i);
@ -189,7 +193,7 @@ class PreparedQuery {
current.append(c);
}
}
throw new JdbcException("Cannot parse given sql; unclosed string");
throw new JdbcSQLException("Cannot parse given sql; unclosed string");
}
static String escapeString(String s) {

View File

@ -92,7 +92,7 @@ public class JdbcDataSource implements DataSource, Wrapper, Closeable {
return doGetConnection(p);
}
private Connection doGetConnection(Properties p) {
private Connection doGetConnection(Properties p) throws SQLException {
JdbcConfiguration cfg = new JdbcConfiguration(url, p);
if (loginTimeout > 0) {
cfg.connectTimeout(TimeUnit.SECONDS.toMillis(loginTimeout));

View File

@ -5,8 +5,9 @@
*/
package org.elasticsearch.xpack.sql.jdbc.net.client;
import org.elasticsearch.xpack.sql.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.util.BytesArray;
import org.elasticsearch.xpack.sql.net.client.ClientException;
import org.elasticsearch.xpack.sql.net.client.JreHttpUrlConnection;
@ -27,7 +28,7 @@ class HttpClient {
private final JdbcConfiguration cfg;
private final URL url;
HttpClient(JdbcConfiguration connectionInfo) {
HttpClient(JdbcConfiguration connectionInfo) throws SQLException {
this.cfg = connectionInfo;
URL baseUrl = connectionInfo.asUrl();
try {
@ -36,7 +37,7 @@ class HttpClient {
this.url = new URL(baseUrl, "_sql/jdbc?error_trace=true");
} catch (MalformedURLException ex) {
throw new JdbcException(ex, "Cannot connect to JDBC endpoint [" + baseUrl.toString() + "_sql/jdbc]");
}
}
}
void setNetworkTimeout(long millis) {
@ -47,16 +48,16 @@ class HttpClient {
return cfg.networkTimeout();
}
boolean head() {
boolean head() throws JdbcSQLException {
try {
URL root = new URL(url, "/");
return AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
return JreHttpUrlConnection.http(root, cfg, JreHttpUrlConnection::head);
});
} catch (MalformedURLException ex) {
throw new JdbcException(ex, "Cannot ping server");
throw new JdbcSQLException(ex, "Cannot ping server");
} catch (ClientException ex) {
throw new JdbcException(ex, "Transport failure");
throw new JdbcSQLException(ex, "Transport failure");
}
}
@ -68,7 +69,7 @@ class HttpClient {
});
});
} catch (ClientException ex) {
throw new JdbcException(ex, "Transport failure");
throw new JdbcSQLException(ex, "Transport failure");
}
}

View File

@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.sql.jdbc.net.client;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ErrorResponse;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.ExceptionResponse;
import org.elasticsearch.xpack.sql.jdbc.net.protocol.InfoRequest;
@ -47,17 +47,17 @@ public class JdbcHttpClient implements Closeable {
private final JdbcConfiguration conCfg;
private InfoResponse serverInfo;
public JdbcHttpClient(JdbcConfiguration conCfg) {
public JdbcHttpClient(JdbcConfiguration conCfg) throws SQLException {
http = new HttpClient(conCfg);
this.conCfg = conCfg;
}
public boolean ping(long timeoutInMs) {
public boolean ping(long timeoutInMs) throws SQLException {
long oldTimeout = http.getNetworkTimeout();
try {
// this works since the connection is single-threaded and its configuration not shared
// with others connections
http.setNetworkTimeout(timeoutInMs);
return http.head();
} finally {
http.setNetworkTimeout(oldTimeout);
@ -123,7 +123,7 @@ public class JdbcHttpClient implements Closeable {
try (DataInputStream in = new DataInputStream(new FastByteArrayInputStream(ba))) {
return action.apply(in);
} catch (IOException ex) {
throw new JdbcException(ex, "Cannot read response");
throw new JdbcSQLException(ex, "Cannot read response");
}
}
@ -137,7 +137,7 @@ public class JdbcHttpClient implements Closeable {
if (response.responseType() == ResponseType.ERROR) {
ErrorResponse error = (ErrorResponse) response;
// TODO: this could be made configurable to switch between message to error
throw new JdbcException("Server returned error: [" + error.stack + "]");
throw new JdbcSQLException("Server returned error: [" + error.stack + "]");
}
return response;
}

View File

@ -5,12 +5,14 @@
*/
package org.elasticsearch.xpack.sql.jdbc.util;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import org.elasticsearch.xpack.sql.jdbc.JdbcSQLException;
import java.sql.SQLException;
// taken from org.apache.lucene.util
abstract class ArrayUtils {
static byte[] grow(byte[] array, int minSize) {
static byte[] grow(byte[] array, int minSize) throws SQLException {
assert minSize >= 0 : "size must be positive (got " + minSize + "): likely integer overflow?";
if (array.length < minSize) {
byte[] newArray = new byte[oversize(minSize, 1)];
@ -20,11 +22,11 @@ abstract class ArrayUtils {
else return array;
}
static int oversize(int minTargetSize, int bytesPerElement) {
static int oversize(int minTargetSize, int bytesPerElement) throws SQLException {
if (minTargetSize < 0) {
// catch usage that accidentally overflows int
throw new JdbcException("invalid array size [" + minTargetSize + "]");
throw new JdbcSQLException("invalid array size [" + minTargetSize + "]");
}
if (minTargetSize == 0) {

View File

@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.net.client.util.StringUtils;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
public class BytesArray {
@ -98,25 +99,25 @@ public class BytesArray {
size = 0;
}
public void copyTo(BytesArray to) {
public void copyTo(BytesArray to) throws SQLException {
to.add(bytes, offset, size);
}
public void add(int b) {
public void add(int b) throws SQLException {
int newcount = size + 1;
checkSize(newcount);
bytes[size] = (byte) b;
size = newcount;
}
public void add(byte[] b) {
public void add(byte[] b) throws SQLException {
if (b == null || b.length == 0) {
return;
}
add(b, 0, b.length);
}
public void add(byte[] b, int off, int len) {
public void add(byte[] b, int off, int len) throws SQLException {
if (len == 0) {
return;
}
@ -126,14 +127,14 @@ public class BytesArray {
size = newcount;
}
public void add(String string) {
public void add(String string) throws SQLException {
if (string == null) {
return;
}
add(string.getBytes(StandardCharsets.UTF_8));
}
private void checkSize(int newcount) {
private void checkSize(int newcount) throws SQLException {
if (newcount > bytes.length) {
bytes = ArrayUtils.grow(bytes, newcount);
}

View File

@ -7,18 +7,19 @@ package org.elasticsearch.xpack.sql.jdbc;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcException;
import java.sql.SQLException;
import static org.hamcrest.Matchers.is;
public class JdbcConfigurationTests extends ESTestCase {
private JdbcConfiguration ci(String url) {
private JdbcConfiguration ci(String url) throws SQLException {
return new JdbcConfiguration(url, null);
}
public void testJustThePrefix() throws Exception {
Exception e = expectThrows(JdbcException.class, () -> ci("jdbc:es:"));
Exception e = expectThrows(JdbcSQLException.class, () -> ci("jdbc:es:"));
assertEquals("Invalid URL [jdbc:es:], format should be [jdbc:es://[host[:port]]*/[prefix]*[?[option=value]&]*]", e.getMessage());
}
@ -53,7 +54,7 @@ public class JdbcConfigurationTests extends ESTestCase {
}
public void testTypeInParam() throws Exception {
Exception e = expectThrows(JdbcException.class, () -> ci("jdbc:es://a:1/foo/bar/tar?debug=true&debug.out=jdbc.out"));
Exception e = expectThrows(JdbcSQLException.class, () -> ci("jdbc:es://a:1/foo/bar/tar?debug=true&debug.out=jdbc.out"));
assertEquals("Unknown parameter [debug.out] ; did you mean [debug.output]", e.getMessage());
}

View File

@ -28,7 +28,7 @@ public class SqlResponse extends ActionResponse implements ToXContentObject {
private long size;
private int columnCount;
private List<ColumnInfo> columns;
// NOCOMMIT investigate reusing Page here - it probably is much more efficient
// TODO investigate reusing Page here - it probably is much more efficient
private List<List<Object>> rows;
public SqlResponse() {

View File

@ -61,7 +61,7 @@ public class GroupByColumnAgg extends GroupingAgg {
}
@Override
protected GroupByColumnAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
protected GroupByColumnAgg copy(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
return new GroupByColumnAgg(id, propertyPath, fieldName, subAggs, subPipelines, order);
}
}

View File

@ -68,7 +68,7 @@ public class GroupByDateAgg extends GroupingAgg {
}
@Override
protected GroupingAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
protected GroupingAgg copy(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
return new GroupByDateAgg(id, propertyPath, fieldName, interval, timeZone, subAggs, subPipelines, order);
}
}

View File

@ -30,7 +30,7 @@ public abstract class GroupingAgg extends Agg {
}
@Override
protected GroupingAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
protected GroupingAgg copy(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order) {
throw new SqlIllegalArgumentException("Default group cannot be cloned");
}
};
@ -74,15 +74,15 @@ public abstract class GroupingAgg extends Agg {
public GroupingAgg withAggs(List<LeafAgg> subAggs) {
return clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, order);
return copy(id(), propertyPath(), fieldName(), subAggs, subPipelines, order);
}
public GroupingAgg withPipelines(List<PipelineAgg> subPipelines) {
return clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, order);
return copy(id(), propertyPath(), fieldName(), subAggs, subPipelines, order);
}
public GroupingAgg with(String id) {
return Objects.equals(id(), id) ? this : clone(id, propertyPath(), fieldName(), subAggs, subPipelines, order);
return Objects.equals(id(), id) ? this : copy(id, propertyPath(), fieldName(), subAggs, subPipelines, order);
}
public GroupingAgg with(Direction order) {
@ -95,11 +95,10 @@ public abstract class GroupingAgg extends Agg {
}
Map<String, Direction> newOrder = new LinkedHashMap<>(this.order);
newOrder.put(leafAggId, order);
return clone(id(), propertyPath(), fieldName(), subAggs, subPipelines, newOrder);
return copy(id(), propertyPath(), fieldName(), subAggs, subPipelines, newOrder);
}
// NOCOMMIT clone is a scary name.
protected abstract GroupingAgg clone(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order);
protected abstract GroupingAgg copy(String id, String propertyPath, String fieldName, List<LeafAgg> subAggs, List<PipelineAgg> subPipelines, Map<String, Direction> order);
@Override
public int hashCode() {

View File

@ -47,6 +47,6 @@ public class SqlSettings {
}
public int pageSize() {
return cfg.getAsInt(PAGE_SIZE, 100);
return cfg.getAsInt(PAGE_SIZE, PAGE_SIZE_DEFAULT);
}
}