diff --git a/core/src/main/java/org/apache/druid/java/util/common/ISE.java b/core/src/main/java/org/apache/druid/java/util/common/ISE.java index 512e7b594c6..17164ff9b1d 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/ISE.java +++ b/core/src/main/java/org/apache/druid/java/util/common/ISE.java @@ -19,9 +19,14 @@ package org.apache.druid.java.util.common; +import com.google.common.base.Strings; +import org.apache.druid.common.exception.SanitizableException; + +import java.util.function.Function; + /** */ -public class ISE extends IllegalStateException +public class ISE extends IllegalStateException implements SanitizableException { public ISE(String formatText, Object... arguments) { @@ -32,4 +37,15 @@ public class ISE extends IllegalStateException { super(StringUtils.nonStrictFormat(formatText, arguments), cause); } + + @Override + public Exception sanitize(Function<String, String> errorMessageTransformFunction) + { + String transformedErrorMessage = errorMessageTransformFunction.apply(getMessage()); + if (Strings.isNullOrEmpty(transformedErrorMessage)) { + return new ISE(""); + } else { + return new ISE(transformedErrorMessage); + } + } } diff --git a/core/src/main/java/org/apache/druid/java/util/common/UOE.java b/core/src/main/java/org/apache/druid/java/util/common/UOE.java index c50db548a14..f38ef23d08c 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/UOE.java +++ b/core/src/main/java/org/apache/druid/java/util/common/UOE.java @@ -19,13 +19,28 @@ package org.apache.druid.java.util.common; +import com.google.common.base.Strings; +import org.apache.druid.common.exception.SanitizableException; + +import java.util.function.Function; + /** */ -public class UOE extends UnsupportedOperationException +public class UOE extends UnsupportedOperationException implements SanitizableException { public UOE(String formatText, Object... arguments) { super(StringUtils.nonStrictFormat(formatText, arguments)); } + @Override + public Exception sanitize(Function<String, String> errorMessageTransformFunction) + { + String transformedErrorMessage = errorMessageTransformFunction.apply(getMessage()); + if (Strings.isNullOrEmpty(transformedErrorMessage)) { + return new UOE(""); + } else { + return new UOE(transformedErrorMessage); + } + } } diff --git a/docs/configuration/index.md b/docs/configuration/index.md index e1a49b3a332..2afe3f9dab3 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -648,6 +648,7 @@ You can use an error response transform strategy to transform error responses fr When you specify an error response transform strategy other than `none`, Druid transforms the error responses from Druid services as follows: - For any query API that fails in the Router service, Druid sets the fields `errorClass` and `host` to null. Druid applies the transformation strategy to the `errorMessage` field. - For any SQL query API that fails, for example `POST /druid/v2/sql/...`, Druid sets the fields `errorClass` and `host` to null. Druid applies the transformation strategy to the `errorMessage` field. + - For any JDBC-related exception, Druid turns all checked exceptions into `QueryInterruptedException`; otherwise, Druid attempts to keep the exception type the same. For example, if the original exception isn't owned by Druid, it becomes a `QueryInterruptedException`. Druid applies the transformation strategy to the `errorMessage` field.
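As an editorial illustration of the JDBC bullet above (not part of this patch): a checked exception that escapes the JDBC code path is first wrapped via `QueryInterruptedException.wrapIfNeeded` and only then run through the configured transform strategy. The sketch below is hypothetical; the class name and the `IOException` are stand-ins.

import java.io.IOException;

import org.apache.druid.query.QueryInterruptedException;

// Editorial sketch: a checked exception is not a Druid runtime exception, so the JDBC error
// path wraps it before the configured errorMessage transform strategy is applied.
public class JdbcSanitizationSketch
{
  public static void main(String[] args)
  {
    // wrapIfNeeded returns the input unchanged if it is already a QueryInterruptedException
    // and wraps it otherwise, which is why a checked or non-Druid exception surfaces to
    // JDBC clients as a QueryInterruptedException.
    QueryInterruptedException wrapped =
        QueryInterruptedException.wrapIfNeeded(new IOException("connection reset by peer"));

    // The transform strategy configured on the server then decides whether the message is
    // exposed as-is, rewritten, or nulled out.
    System.out.println(wrapped.getMessage());
  }
}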
###### No error response transform strategy diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 690757f7e31..8c2227eedb8 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -47,7 +47,7 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest private static final String BASIC_AUTHENTICATOR = "basic"; private static final String BASIC_AUTHORIZER = "basic"; - private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: BasicSecurityAuthenticationException: User metadata store authentication failed."; + private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User metadata store authentication failed. -> BasicSecurityAuthenticationException: User metadata store authentication failed."; private HttpClient druid99; diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java index 97a7c531756..5adc9bbbf85 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java @@ -53,7 +53,7 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationT private static final String LDAP_AUTHENTICATOR = "ldap"; private static final String LDAP_AUTHORIZER = "ldapauth"; - private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: BasicSecurityAuthenticationException: User LDAP authentication failed."; + private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: QueryInterruptedException: User LDAP authentication failed. 
-> BasicSecurityAuthenticationException: User LDAP authentication failed."; @Inject IntegrationTestingConfig config; diff --git a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java index 646c0e5855d..ff2196e1193 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java +++ b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java @@ -65,7 +65,7 @@ public class ServerConfig boolean enableForwardedRequestCustomizer, @NotNull List allowedHttpMethods, boolean showDetailedJettyErrors, - ErrorResponseTransformStrategy errorResponseTransformStrategy + @NotNull ErrorResponseTransformStrategy errorResponseTransformStrategy ) { this.numThreads = numThreads; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java index 79e6b2946e9..07557e36d07 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java @@ -63,7 +63,11 @@ public class DruidConnection @GuardedBy("connectionLock") private boolean open = true; - public DruidConnection(final String connectionId, final int maxStatements, final Map context) + public DruidConnection( + final String connectionId, + final int maxStatements, + final Map context + ) { this.connectionId = Preconditions.checkNotNull(connectionId); this.maxStatements = maxStatements; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java index 40920b16e00..6dba621b75c 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidMeta.java @@ -38,6 +38,7 @@ import org.apache.calcite.avatica.remote.TypedValue; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.UOE; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.server.security.AuthenticationResult; import org.apache.druid.server.security.Authenticator; @@ -64,15 +65,31 @@ import java.util.concurrent.atomic.AtomicInteger; public class DruidMeta extends MetaImpl { + /** + * Logs any throwable and string format message with args at the error level. + * + * @param error the Throwable to be logged + * @param message the message to be logged. Can be in string format structure + * @param format the format arguments for the format message string + * @param any type that extends throwable + * @return the original Throwable + */ public static T logFailure(T error, String message, Object... format) { - LOG.error(message, format); + LOG.error(error, message, format); return error; } + /** + * Logs any throwable at the error level with the throwables message. 
+ * + * @param error the throwable to be logged + * @param any type that extends throwable + * @return the original Throwable + */ public static T logFailure(T error) { - LOG.error(error, error.getMessage()); + logFailure(error, error.getMessage()); return error; } @@ -82,8 +99,11 @@ public class DruidMeta extends MetaImpl private final ScheduledExecutorService exec; private final AvaticaServerConfig config; private final List authenticators; + private final ErrorHandler errorHandler; - /** Used to track logical connections. */ + /** + * Used to track logical connections. + */ private final ConcurrentMap connections = new ConcurrentHashMap<>(); /** @@ -96,12 +116,14 @@ public class DruidMeta extends MetaImpl public DruidMeta( final SqlLifecycleFactory sqlLifecycleFactory, final AvaticaServerConfig config, + final ErrorHandler errorHandler, final Injector injector ) { super(null); this.sqlLifecycleFactory = Preconditions.checkNotNull(sqlLifecycleFactory, "sqlLifecycleFactory"); this.config = config; + this.errorHandler = errorHandler; this.exec = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder() .setNameFormat(StringUtils.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode()))) @@ -116,41 +138,75 @@ public class DruidMeta extends MetaImpl @Override public void openConnection(final ConnectionHandle ch, final Map info) { - // Build connection context. - final ImmutableMap.Builder context = ImmutableMap.builder(); - if (info != null) { - for (Map.Entry entry : info.entrySet()) { - context.put(entry); + try { + // Build connection context. + final ImmutableMap.Builder context = ImmutableMap.builder(); + if (info != null) { + for (Map.Entry entry : info.entrySet()) { + context.put(entry); + } } + // we don't want to stringify arrays for JDBC ever because avatica needs to handle this + context.put(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); + openDruidConnection(ch.id, context.build()); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + // we want to avoid sanitizing avatica specific exceptions as the avatica code can rely on them to handle issues + // differently + throw errorHandler.sanitize(t); } - // we don't want to stringify arrays for JDBC ever because avatica needs to handle this - context.put(PlannerContext.CTX_SQL_STRINGIFY_ARRAYS, false); - openDruidConnection(ch.id, context.build()); } @Override public void closeConnection(final ConnectionHandle ch) { - final DruidConnection druidConnection = connections.remove(ch.id); - if (druidConnection != null) { - connectionCount.decrementAndGet(); - druidConnection.close(); + try { + final DruidConnection druidConnection = connections.remove(ch.id); + if (druidConnection != null) { + connectionCount.decrementAndGet(); + druidConnection.close(); + } + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); } } @Override public ConnectionProperties connectionSync(final ConnectionHandle ch, final ConnectionProperties connProps) { - // getDruidConnection re-syncs it. - getDruidConnection(ch.id); - return connProps; + try { + // getDruidConnection re-syncs it. 
+ getDruidConnection(ch.id); + return connProps; + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override public StatementHandle createStatement(final ConnectionHandle ch) { - final DruidStatement druidStatement = getDruidConnection(ch.id).createStatement(sqlLifecycleFactory); - return new StatementHandle(ch.id, druidStatement.getStatementId(), null); + try { + final DruidStatement druidStatement = getDruidConnection(ch.id).createStatement(sqlLifecycleFactory); + return new StatementHandle(ch.id, druidStatement.getStatementId(), null); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override @@ -160,26 +216,34 @@ public class DruidMeta extends MetaImpl final long maxRowCount ) { - final StatementHandle statement = createStatement(ch); - final DruidStatement druidStatement; try { - druidStatement = getDruidStatement(statement); + final StatementHandle statement = createStatement(ch); + final DruidStatement druidStatement; + try { + druidStatement = getDruidStatement(statement); + } + catch (NoSuchStatementException e) { + throw logFailure(new ISE(e, e.getMessage())); + } + final DruidConnection druidConnection = getDruidConnection(statement.connectionId); + AuthenticationResult authenticationResult = authenticateConnection(druidConnection); + if (authenticationResult == null) { + throw logFailure( + new ForbiddenException("Authentication failed."), + "Authentication failed for statement[%s]", + druidStatement.getStatementId() + ); + } + statement.signature = druidStatement.prepare(sql, maxRowCount, authenticationResult).getSignature(); + LOG.debug("Successfully prepared statement[%s] for execution", druidStatement.getStatementId()); + return statement; } - catch (NoSuchStatementException e) { - throw logFailure(new IllegalStateException(e)); + catch (NoSuchConnectionException e) { + throw e; } - final DruidConnection druidConnection = getDruidConnection(statement.connectionId); - AuthenticationResult authenticationResult = authenticateConnection(druidConnection); - if (authenticationResult == null) { - throw logFailure( - new ForbiddenException("Authentication failed."), - "Authentication failed for statement[%s]", - druidStatement.getStatementId() - ); + catch (Throwable t) { + throw errorHandler.sanitize(t); } - statement.signature = druidStatement.prepare(sql, maxRowCount, authenticationResult).getSignature(); - LOG.debug("Successfully prepared statement[%s] for execution", druidStatement.getStatementId()); - return statement; } @Deprecated @@ -192,7 +256,7 @@ public class DruidMeta extends MetaImpl ) { // Avatica doesn't call this. - throw new UnsupportedOperationException("Deprecated"); + throw errorHandler.sanitize(new UOE("Deprecated")); } @Override @@ -204,36 +268,45 @@ public class DruidMeta extends MetaImpl final PrepareCallback callback ) throws NoSuchStatementException { - // Ignore "callback", this class is designed for use with LocalService which doesn't use it. 
- final DruidStatement druidStatement = getDruidStatement(statement); - final DruidConnection druidConnection = getDruidConnection(statement.connectionId); - AuthenticationResult authenticationResult = authenticateConnection(druidConnection); - if (authenticationResult == null) { - throw logFailure( - new ForbiddenException("Authentication failed."), - "Authentication failed for statement[%s]", - druidStatement.getStatementId() + try { + // Ignore "callback", this class is designed for use with LocalService which doesn't use it. + final DruidStatement druidStatement = getDruidStatement(statement); + final DruidConnection druidConnection = getDruidConnection(statement.connectionId); + AuthenticationResult authenticationResult = authenticateConnection(druidConnection); + if (authenticationResult == null) { + throw logFailure( + new ForbiddenException("Authentication failed."), + "Authentication failed for statement[%s]", + druidStatement.getStatementId() + ); + } + druidStatement.prepare(sql, maxRowCount, authenticationResult); + final Frame firstFrame = druidStatement.execute(Collections.emptyList()) + .nextFrame( + DruidStatement.START_OFFSET, + getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) + ); + final Signature signature = druidStatement.getSignature(); + LOG.debug("Successfully prepared statement[%s] and started execution", druidStatement.getStatementId()); + return new ExecuteResult( + ImmutableList.of( + MetaResultSet.create( + statement.connectionId, + statement.id, + false, + signature, + firstFrame + ) + ) ); } - druidStatement.prepare(sql, maxRowCount, authenticationResult); - final Frame firstFrame = druidStatement.execute(Collections.emptyList()) - .nextFrame( - DruidStatement.START_OFFSET, - getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) - ); - final Signature signature = druidStatement.getSignature(); - LOG.debug("Successfully prepared statement[%s] and started execution", druidStatement.getStatementId()); - return new ExecuteResult( - ImmutableList.of( - MetaResultSet.create( - statement.connectionId, - statement.id, - false, - signature, - firstFrame - ) - ) - ); + // cannot affect these exceptions as avatica handles them + catch (NoSuchConnectionException | NoSuchStatementException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override @@ -243,7 +316,7 @@ public class DruidMeta extends MetaImpl ) { // Batch statements are used for bulk updates, but we don't support updates. - throw new UnsupportedOperationException("Batch statements not supported"); + throw errorHandler.sanitize(new UOE("Batch statements not supported")); } @Override @@ -253,7 +326,7 @@ public class DruidMeta extends MetaImpl ) { // Batch statements are used for bulk updates, but we don't support updates. 
- throw new UnsupportedOperationException("Batch statements not supported"); + throw errorHandler.sanitize(new UOE("Batch statements not supported")); } @Override @@ -263,9 +336,17 @@ public class DruidMeta extends MetaImpl final int fetchMaxRowCount ) throws NoSuchStatementException, MissingResultsException { - final int maxRows = getEffectiveMaxRowsPerFrame(fetchMaxRowCount); - LOG.debug("Fetching next frame from offset[%s] with [%s] rows for statement[%s]", offset, maxRows, statement.id); - return getDruidStatement(statement).nextFrame(offset, maxRows); + try { + final int maxRows = getEffectiveMaxRowsPerFrame(fetchMaxRowCount); + LOG.debug("Fetching next frame from offset[%s] with [%s] rows for statement[%s]", offset, maxRows, statement.id); + return getDruidStatement(statement).nextFrame(offset, maxRows); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Deprecated @@ -277,7 +358,7 @@ public class DruidMeta extends MetaImpl ) { // Avatica doesn't call this. - throw new UnsupportedOperationException("Deprecated"); + throw errorHandler.sanitize(new UOE("Deprecated")); } @Override @@ -287,26 +368,34 @@ public class DruidMeta extends MetaImpl final int maxRowsInFirstFrame ) throws NoSuchStatementException { - final DruidStatement druidStatement = getDruidStatement(statement); - final Frame firstFrame = druidStatement.execute(parameterValues) - .nextFrame( - DruidStatement.START_OFFSET, - getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) - ); + try { + final DruidStatement druidStatement = getDruidStatement(statement); + final Frame firstFrame = druidStatement.execute(parameterValues) + .nextFrame( + DruidStatement.START_OFFSET, + getEffectiveMaxRowsPerFrame(maxRowsInFirstFrame) + ); - final Signature signature = druidStatement.getSignature(); - LOG.debug("Successfully started execution of statement[%s]", druidStatement.getStatementId()); - return new ExecuteResult( - ImmutableList.of( - MetaResultSet.create( - statement.connectionId, - statement.id, - false, - signature, - firstFrame - ) - ) - ); + final Signature signature = druidStatement.getSignature(); + LOG.debug("Successfully started execution of statement[%s]", druidStatement.getStatementId()); + return new ExecuteResult( + ImmutableList.of( + MetaResultSet.create( + statement.connectionId, + statement.id, + false, + signature, + firstFrame + ) + ) + ); + } + catch (NoSuchStatementException | NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override @@ -325,14 +414,22 @@ public class DruidMeta extends MetaImpl @Override public void closeStatement(final StatementHandle h) { - // connections.get, not getDruidConnection, since we want to silently ignore nonexistent statements - final DruidConnection druidConnection = connections.get(h.connectionId); - if (druidConnection != null) { - final DruidStatement druidStatement = druidConnection.getStatement(h.id); - if (druidStatement != null) { - druidStatement.close(); + try { + // connections.get, not getDruidConnection, since we want to silently ignore nonexistent statements + final DruidConnection druidConnection = connections.get(h.connectionId); + if (druidConnection != null) { + final DruidStatement druidStatement = druidConnection.getStatement(h.id); + if (druidStatement != null) { + druidStatement.close(); + } } } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override @@ 
-342,13 +439,25 @@ public class DruidMeta extends MetaImpl final long offset ) throws NoSuchStatementException { - final DruidStatement druidStatement = getDruidStatement(sh); - final boolean isDone = druidStatement.isDone(); - final long currentOffset = druidStatement.getCurrentOffset(); - if (currentOffset != offset) { - throw logFailure(new ISE("Requested offset[%,d] does not match currentOffset[%,d]", offset, currentOffset)); + try { + final DruidStatement druidStatement = getDruidStatement(sh); + final boolean isDone = druidStatement.isDone(); + final long currentOffset = druidStatement.getCurrentOffset(); + if (currentOffset != offset) { + throw logFailure(new ISE( + "Requested offset[%,d] does not match currentOffset[%,d]", + offset, + currentOffset + )); + } + return !isDone; + } + catch (NoSuchStatementException | NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); } - return !isDone; } @Override @@ -372,14 +481,22 @@ public class DruidMeta extends MetaImpl @Override public MetaResultSet getCatalogs(final ConnectionHandle ch) { - final String sql = "SELECT\n" - + " DISTINCT CATALOG_NAME AS TABLE_CAT\n" - + "FROM\n" - + " INFORMATION_SCHEMA.SCHEMATA\n" - + "ORDER BY\n" - + " TABLE_CAT\n"; + try { + final String sql = "SELECT\n" + + " DISTINCT CATALOG_NAME AS TABLE_CAT\n" + + "FROM\n" + + " INFORMATION_SCHEMA.SCHEMATA\n" + + "ORDER BY\n" + + " TABLE_CAT\n"; - return sqlResultSet(ch, sql); + return sqlResultSet(ch, sql); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @Override @@ -389,26 +506,34 @@ public class DruidMeta extends MetaImpl final Pat schemaPattern ) { - final List whereBuilder = new ArrayList<>(); - if (catalog != null) { - whereBuilder.add("SCHEMATA.CATALOG_NAME = " + Calcites.escapeStringLiteral(catalog)); + try { + final List whereBuilder = new ArrayList<>(); + if (catalog != null) { + whereBuilder.add("SCHEMATA.CATALOG_NAME = " + Calcites.escapeStringLiteral(catalog)); + } + + if (schemaPattern.s != null) { + whereBuilder.add("SCHEMATA.SCHEMA_NAME LIKE " + withEscapeClause(schemaPattern.s)); + } + + final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); + final String sql = "SELECT\n" + + " SCHEMA_NAME AS TABLE_SCHEM,\n" + + " CATALOG_NAME AS TABLE_CATALOG\n" + + "FROM\n" + + " INFORMATION_SCHEMA.SCHEMATA\n" + + where + "\n" + + "ORDER BY\n" + + " TABLE_CATALOG, TABLE_SCHEM\n"; + + return sqlResultSet(ch, sql); } - - if (schemaPattern.s != null) { - whereBuilder.add("SCHEMATA.SCHEMA_NAME LIKE " + withEscapeClause(schemaPattern.s)); + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); } - - final String where = whereBuilder.isEmpty() ? 
"" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); - final String sql = "SELECT\n" - + " SCHEMA_NAME AS TABLE_SCHEM,\n" - + " CATALOG_NAME AS TABLE_CATALOG\n" - + "FROM\n" - + " INFORMATION_SCHEMA.SCHEMATA\n" - + where + "\n" - + "ORDER BY\n" - + " TABLE_CATALOG, TABLE_SCHEM\n"; - - return sqlResultSet(ch, sql); } @Override @@ -420,46 +545,54 @@ public class DruidMeta extends MetaImpl final List typeList ) { - final List whereBuilder = new ArrayList<>(); - if (catalog != null) { - whereBuilder.add("TABLES.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog)); - } - - if (schemaPattern.s != null) { - whereBuilder.add("TABLES.TABLE_SCHEMA LIKE " + withEscapeClause(schemaPattern.s)); - } - - if (tableNamePattern.s != null) { - whereBuilder.add("TABLES.TABLE_NAME LIKE " + withEscapeClause(tableNamePattern.s)); - } - - if (typeList != null) { - final List escapedTypes = new ArrayList<>(); - for (String type : typeList) { - escapedTypes.add(Calcites.escapeStringLiteral(type)); + try { + final List whereBuilder = new ArrayList<>(); + if (catalog != null) { + whereBuilder.add("TABLES.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog)); } - whereBuilder.add("TABLES.TABLE_TYPE IN (" + Joiner.on(", ").join(escapedTypes) + ")"); + + if (schemaPattern.s != null) { + whereBuilder.add("TABLES.TABLE_SCHEMA LIKE " + withEscapeClause(schemaPattern.s)); + } + + if (tableNamePattern.s != null) { + whereBuilder.add("TABLES.TABLE_NAME LIKE " + withEscapeClause(tableNamePattern.s)); + } + + if (typeList != null) { + final List escapedTypes = new ArrayList<>(); + for (String type : typeList) { + escapedTypes.add(Calcites.escapeStringLiteral(type)); + } + whereBuilder.add("TABLES.TABLE_TYPE IN (" + Joiner.on(", ").join(escapedTypes) + ")"); + } + + final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); + final String sql = "SELECT\n" + + " TABLE_CATALOG AS TABLE_CAT,\n" + + " TABLE_SCHEMA AS TABLE_SCHEM,\n" + + " TABLE_NAME AS TABLE_NAME,\n" + + " TABLE_TYPE AS TABLE_TYPE,\n" + + " CAST(NULL AS VARCHAR) AS REMARKS,\n" + + " CAST(NULL AS VARCHAR) AS TYPE_CAT,\n" + + " CAST(NULL AS VARCHAR) AS TYPE_SCHEM,\n" + + " CAST(NULL AS VARCHAR) AS TYPE_NAME,\n" + + " CAST(NULL AS VARCHAR) AS SELF_REFERENCING_COL_NAME,\n" + + " CAST(NULL AS VARCHAR) AS REF_GENERATION\n" + + "FROM\n" + + " INFORMATION_SCHEMA.TABLES\n" + + where + "\n" + + "ORDER BY\n" + + " TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME\n"; + + return sqlResultSet(ch, sql); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); } - - final String where = whereBuilder.isEmpty() ? 
"" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); - final String sql = "SELECT\n" - + " TABLE_CATALOG AS TABLE_CAT,\n" - + " TABLE_SCHEMA AS TABLE_SCHEM,\n" - + " TABLE_NAME AS TABLE_NAME,\n" - + " TABLE_TYPE AS TABLE_TYPE,\n" - + " CAST(NULL AS VARCHAR) AS REMARKS,\n" - + " CAST(NULL AS VARCHAR) AS TYPE_CAT,\n" - + " CAST(NULL AS VARCHAR) AS TYPE_SCHEM,\n" - + " CAST(NULL AS VARCHAR) AS TYPE_NAME,\n" - + " CAST(NULL AS VARCHAR) AS SELF_REFERENCING_COL_NAME,\n" - + " CAST(NULL AS VARCHAR) AS REF_GENERATION\n" - + "FROM\n" - + " INFORMATION_SCHEMA.TABLES\n" - + where + "\n" - + "ORDER BY\n" - + " TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME\n"; - - return sqlResultSet(ch, sql); } @Override @@ -471,70 +604,86 @@ public class DruidMeta extends MetaImpl final Pat columnNamePattern ) { - final List whereBuilder = new ArrayList<>(); - if (catalog != null) { - whereBuilder.add("COLUMNS.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog)); + try { + final List whereBuilder = new ArrayList<>(); + if (catalog != null) { + whereBuilder.add("COLUMNS.TABLE_CATALOG = " + Calcites.escapeStringLiteral(catalog)); + } + + if (schemaPattern.s != null) { + whereBuilder.add("COLUMNS.TABLE_SCHEMA LIKE " + withEscapeClause(schemaPattern.s)); + } + + if (tableNamePattern.s != null) { + whereBuilder.add("COLUMNS.TABLE_NAME LIKE " + withEscapeClause(tableNamePattern.s)); + } + + if (columnNamePattern.s != null) { + whereBuilder.add("COLUMNS.COLUMN_NAME LIKE " + + withEscapeClause(columnNamePattern.s)); + } + + final String where = whereBuilder.isEmpty() ? "" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); + final String sql = "SELECT\n" + + " TABLE_CATALOG AS TABLE_CAT,\n" + + " TABLE_SCHEMA AS TABLE_SCHEM,\n" + + " TABLE_NAME AS TABLE_NAME,\n" + + " COLUMN_NAME AS COLUMN_NAME,\n" + + " CAST(JDBC_TYPE AS INTEGER) AS DATA_TYPE,\n" + + " DATA_TYPE AS TYPE_NAME,\n" + + " -1 AS COLUMN_SIZE,\n" + + " -1 AS BUFFER_LENGTH,\n" + + " -1 AS DECIMAL_DIGITS,\n" + + " -1 AS NUM_PREC_RADIX,\n" + + " CASE IS_NULLABLE WHEN 'YES' THEN 1 ELSE 0 END AS NULLABLE,\n" + + " CAST(NULL AS VARCHAR) AS REMARKS,\n" + + " COLUMN_DEFAULT AS COLUMN_DEF,\n" + + " -1 AS SQL_DATA_TYPE,\n" + + " -1 AS SQL_DATETIME_SUB,\n" + + " -1 AS CHAR_OCTET_LENGTH,\n" + + " CAST(ORDINAL_POSITION AS INTEGER) AS ORDINAL_POSITION,\n" + + " IS_NULLABLE AS IS_NULLABLE,\n" + + " CAST(NULL AS VARCHAR) AS SCOPE_CATALOG,\n" + + " CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA,\n" + + " CAST(NULL AS VARCHAR) AS SCOPE_TABLE,\n" + + " -1 AS SOURCE_DATA_TYPE,\n" + + " 'NO' AS IS_AUTOINCREMENT,\n" + + " 'NO' AS IS_GENERATEDCOLUMN\n" + + "FROM\n" + + " INFORMATION_SCHEMA.COLUMNS\n" + + where + "\n" + + "ORDER BY\n" + + " TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION\n"; + + return sqlResultSet(ch, sql); } - - if (schemaPattern.s != null) { - whereBuilder.add("COLUMNS.TABLE_SCHEMA LIKE " + withEscapeClause(schemaPattern.s)); + catch (NoSuchConnectionException e) { + throw e; } - - if (tableNamePattern.s != null) { - whereBuilder.add("COLUMNS.TABLE_NAME LIKE " + withEscapeClause(tableNamePattern.s)); + catch (Throwable t) { + throw errorHandler.sanitize(t); } - - if (columnNamePattern.s != null) { - whereBuilder.add("COLUMNS.COLUMN_NAME LIKE " - + withEscapeClause(columnNamePattern.s)); - } - - final String where = whereBuilder.isEmpty() ? 
"" : "WHERE " + Joiner.on(" AND ").join(whereBuilder); - final String sql = "SELECT\n" - + " TABLE_CATALOG AS TABLE_CAT,\n" - + " TABLE_SCHEMA AS TABLE_SCHEM,\n" - + " TABLE_NAME AS TABLE_NAME,\n" - + " COLUMN_NAME AS COLUMN_NAME,\n" - + " CAST(JDBC_TYPE AS INTEGER) AS DATA_TYPE,\n" - + " DATA_TYPE AS TYPE_NAME,\n" - + " -1 AS COLUMN_SIZE,\n" - + " -1 AS BUFFER_LENGTH,\n" - + " -1 AS DECIMAL_DIGITS,\n" - + " -1 AS NUM_PREC_RADIX,\n" - + " CASE IS_NULLABLE WHEN 'YES' THEN 1 ELSE 0 END AS NULLABLE,\n" - + " CAST(NULL AS VARCHAR) AS REMARKS,\n" - + " COLUMN_DEFAULT AS COLUMN_DEF,\n" - + " -1 AS SQL_DATA_TYPE,\n" - + " -1 AS SQL_DATETIME_SUB,\n" - + " -1 AS CHAR_OCTET_LENGTH,\n" - + " CAST(ORDINAL_POSITION AS INTEGER) AS ORDINAL_POSITION,\n" - + " IS_NULLABLE AS IS_NULLABLE,\n" - + " CAST(NULL AS VARCHAR) AS SCOPE_CATALOG,\n" - + " CAST(NULL AS VARCHAR) AS SCOPE_SCHEMA,\n" - + " CAST(NULL AS VARCHAR) AS SCOPE_TABLE,\n" - + " -1 AS SOURCE_DATA_TYPE,\n" - + " 'NO' AS IS_AUTOINCREMENT,\n" - + " 'NO' AS IS_GENERATEDCOLUMN\n" - + "FROM\n" - + " INFORMATION_SCHEMA.COLUMNS\n" - + where + "\n" - + "ORDER BY\n" - + " TABLE_CAT, TABLE_SCHEM, TABLE_NAME, ORDINAL_POSITION\n"; - - return sqlResultSet(ch, sql); } @Override public MetaResultSet getTableTypes(final ConnectionHandle ch) { - final String sql = "SELECT\n" - + " DISTINCT TABLE_TYPE AS TABLE_TYPE\n" - + "FROM\n" - + " INFORMATION_SCHEMA.TABLES\n" - + "ORDER BY\n" - + " TABLE_TYPE\n"; + try { + final String sql = "SELECT\n" + + " DISTINCT TABLE_TYPE AS TABLE_TYPE\n" + + "FROM\n" + + " INFORMATION_SCHEMA.TABLES\n" + + "ORDER BY\n" + + " TABLE_TYPE\n"; - return sqlResultSet(ch, sql); + return sqlResultSet(ch, sql); + } + catch (NoSuchConnectionException e) { + throw e; + } + catch (Throwable t) { + throw errorHandler.sanitize(t); + } } @VisibleForTesting @@ -585,7 +734,11 @@ public class DruidMeta extends MetaImpl if (connectionCount.get() > config.getMaxConnections()) { // We aren't going to make a connection after all. connectionCount.decrementAndGet(); - throw logFailure(new ISE("Too many connections, limit is[%,d]", config.getMaxConnections())); + throw logFailure( + new ISE("Too many connections"), + "Too many connections, limit is[%,d] per broker", + config.getMaxConnections() + ); } } @@ -610,9 +763,7 @@ public class DruidMeta extends MetaImpl * Get a connection, or throw an exception if it doesn't exist. Also refreshes the timeout timer. * * @param connectionId connection id - * * @return the connection - * * @throws NoSuchConnectionException if the connection id doesn't exist */ @Nonnull @@ -654,7 +805,7 @@ public class DruidMeta extends MetaImpl final ExecuteResult result = prepareAndExecute(statement, sql, -1, -1, null); final MetaResultSet metaResultSet = Iterables.getOnlyElement(result.resultSets); if (!metaResultSet.firstFrame.done) { - throw logFailure(new ISE("Expected all results to be in a single frame!")); + throw new ISE("Expected all results to be in a single frame!"); } return metaResultSet; } @@ -671,11 +822,11 @@ public class DruidMeta extends MetaImpl * {@link java.sql.ResultSet}. This value corresponds to {@link java.sql.Statement#setFetchSize(int)} (which is a user * hint, we don't have to honor it), and this method modifies it, ensuring the actual chosen value falls within * {@link AvaticaServerConfig#minRowsPerFrame} and {@link AvaticaServerConfig#maxRowsPerFrame}. - * + *
* <p>
* A value of -1 supplied as input indicates that the client has no preference for fetch size, and can handle * unlimited results (at our discretion). Similarly, a value of -1 for {@link AvaticaServerConfig#maxRowsPerFrame} * also indicates that there is no upper limit on fetch size on the server side. - * + * <p>
* {@link AvaticaServerConfig#minRowsPerFrame} must be configured to a value greater than 0, because it will be * checked against if any additional frames are required (which means one of the input or maximum was set to a value * other than -1). diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java new file mode 100644 index 00000000000..62ab3c21940 --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/avatica/ErrorHandler.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.avatica; + +import com.google.inject.Inject; +import org.apache.druid.common.exception.ErrorResponseTransformStrategy; +import org.apache.druid.common.exception.NoErrorResponseTransformStrategy; +import org.apache.druid.common.exception.SanitizableException; +import org.apache.druid.java.util.common.ISE; +import org.apache.druid.java.util.common.UOE; +import org.apache.druid.query.QueryException; +import org.apache.druid.query.QueryInterruptedException; +import org.apache.druid.server.initialization.ServerConfig; +import org.apache.druid.server.security.ForbiddenException; + + +/** + * ErrorHandler is a utility class that is used to sanitize exceptions. + */ +class ErrorHandler +{ + private final ErrorResponseTransformStrategy errorResponseTransformStrategy; + + @Inject + ErrorHandler(final ServerConfig serverConfig) + { + this.errorResponseTransformStrategy = serverConfig.getErrorResponseTransformStrategy(); + } + + /** + * Sanitizes a Throwable. If it's a runtime exception and its cause is sanitizable, it sanitizes that cause and + * returns it as a sanitized RuntimeException. This makes a best effort to keep the original exception type. If + * it's a checked exception, it is turned into a QueryInterruptedException.
+ * + * @param error The Throwable to be sanitized + * @param <T> Any class that extends Throwable + * @return The sanitized Throwable + */ + public <T extends Throwable> RuntimeException sanitize(T error) + { + if (error instanceof QueryException) { + return (QueryException) errorResponseTransformStrategy.transformIfNeeded((QueryException) error); + } + if (error instanceof ForbiddenException) { + return (ForbiddenException) errorResponseTransformStrategy.transformIfNeeded((ForbiddenException) error); + } + if (error instanceof ISE) { + return (ISE) errorResponseTransformStrategy.transformIfNeeded((ISE) error); + } + if (error instanceof UOE) { + return (UOE) errorResponseTransformStrategy.transformIfNeeded((UOE) error); + } + // catch any non-explicit sanitizable exceptions + if (error instanceof SanitizableException) { + return new RuntimeException(errorResponseTransformStrategy.transformIfNeeded((SanitizableException) error)); + } + // cannot check cause of the throwable because it cannot be cast back to the original's type + // so this only checks runtime exceptions for causes + if (error instanceof RuntimeException && error.getCause() instanceof SanitizableException) { + // could do `throw sanitize(error);` but just sanitizing immediately avoids unnecessarily going down multiple levels + return new RuntimeException(errorResponseTransformStrategy.transformIfNeeded((SanitizableException) error.getCause())); + } + QueryInterruptedException wrappedError = QueryInterruptedException.wrapIfNeeded(error); + return (QueryInterruptedException) errorResponseTransformStrategy.transformIfNeeded(wrappedError); + } + + /** + * Check to see if something needs to be sanitized. + * <p>
+ * This does this by checking to see if the ErrorResponse is different than a NoOp Error response transform strategy. + * + * @return a boolean that returns true if error handler has an error response strategy other than the NoOp error + * response strategy + */ + public boolean hasAffectingErrorResponseTransformStrategy() + { + return !errorResponseTransformStrategy.equals(NoErrorResponseTransformStrategy.INSTANCE); + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index a51fe0a53d0..d767eff8665 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -53,6 +53,7 @@ import org.apache.druid.server.QueryScheduler; import org.apache.druid.server.QuerySchedulerProvider; import org.apache.druid.server.QueryStackTests; import org.apache.druid.server.RequestLogLine; +import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.log.RequestLogger; import org.apache.druid.server.log.TestRequestLogger; import org.apache.druid.server.metrics.NoopServiceEmitter; @@ -834,7 +835,7 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase clientNoTrailingSlash.createStatement(); expectedException.expect(AvaticaClientRuntimeException.class); - expectedException.expectMessage("Too many connections, limit is[4]"); + expectedException.expectMessage("Too many connections"); final Connection connection5 = DriverManager.getConnection(url); } @@ -899,6 +900,7 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase ) ), smallFrameConfig, + new ErrorHandler(new ServerConfig()), injector ) { @@ -988,6 +990,7 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase ) ), smallFrameConfig, + new ErrorHandler(new ServerConfig()), injector ) { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/ErrorHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/ErrorHandlerTest.java new file mode 100644 index 00000000000..68b7703551c --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/avatica/ErrorHandlerTest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.sql.avatica; + +import com.google.common.collect.ImmutableList; +import org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy; +import org.apache.druid.query.QueryException; +import org.apache.druid.query.QueryInterruptedException; +import org.apache.druid.server.initialization.ServerConfig; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +public class ErrorHandlerTest +{ + + @Test + public void testErrorHandlerSanitizesErrorAsExpected() + { + ServerConfig serverConfig = Mockito.mock(ServerConfig.class); + AllowedRegexErrorResponseTransformStrategy emptyAllowedRegexErrorResponseTransformStrategy = new AllowedRegexErrorResponseTransformStrategy( + ImmutableList.of()); + + Mockito.when(serverConfig.getErrorResponseTransformStrategy()) + .thenReturn(emptyAllowedRegexErrorResponseTransformStrategy); + ErrorHandler errorHandler = new ErrorHandler(serverConfig); + QueryException input = new QueryException("error", "error message", "error class", "host"); + + RuntimeException output = errorHandler.sanitize(input); + Assert.assertNull(output.getMessage()); + } + + @Test + public void testErrorHandlerDefaultErrorResponseTransformStrategySanitizesErrorAsExpected() + { + ServerConfig serverConfig = new ServerConfig(); + ErrorHandler errorHandler = new ErrorHandler(serverConfig); + QueryInterruptedException input = new QueryInterruptedException("error", "error messagez", "error class", "host"); + + RuntimeException output = errorHandler.sanitize(input); + Assert.assertEquals("error messagez", output.getMessage()); + } + + @Test + public void testErrorHandlerHasAffectingErrorResponseTransformStrategyReturnsTrueWhenNotUsingNoErrorResponseTransformStrategy() + { + ServerConfig serverConfig = Mockito.mock(ServerConfig.class); + AllowedRegexErrorResponseTransformStrategy emptyAllowedRegexErrorResponseTransformStrategy = new AllowedRegexErrorResponseTransformStrategy( + ImmutableList.of()); + + Mockito.when(serverConfig.getErrorResponseTransformStrategy()) + .thenReturn(emptyAllowedRegexErrorResponseTransformStrategy); + ErrorHandler errorHandler = new ErrorHandler(serverConfig); + Assert.assertTrue(errorHandler.hasAffectingErrorResponseTransformStrategy()); + } + + @Test + public void testErrorHandlerHasAffectingErrorResponseTransformStrategyReturnsFalseWhenUsingNoErrorResponseTransformStrategy() + { + ServerConfig serverConfig = new ServerConfig(); + ErrorHandler errorHandler = new ErrorHandler(serverConfig); + Assert.assertFalse(errorHandler.hasAffectingErrorResponseTransformStrategy()); + } +}
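For reference, a minimal editorial sketch (not part of this patch) of the sanitize-on-the-way-out pattern that the DruidMeta changes above rely on: any unexpected Throwable is converted into a sanitized RuntimeException before it can reach Avatica. The class and its doWork() method are hypothetical; it lives in org.apache.druid.sql.avatica only because ErrorHandler is package-private.

package org.apache.druid.sql.avatica;

import org.apache.druid.server.initialization.ServerConfig;

// Editorial sketch mirroring the try/catch pattern added to DruidMeta in this patch.
// ErrorHandler and ServerConfig are the real classes used above; doWork() is invented.
public class ErrorHandlerUsageSketch
{
  private final ErrorHandler errorHandler = new ErrorHandler(new ServerConfig());

  public int handleRequest()
  {
    try {
      return doWork();
    }
    catch (Throwable t) {
      // Druid-owned runtime exceptions keep their type where possible; checked or
      // foreign exceptions come back wrapped as QueryInterruptedException.
      throw errorHandler.sanitize(t);
    }
  }

  private int doWork()
  {
    // Placeholder for real work, e.g. preparing or executing a statement.
    throw new UnsupportedOperationException("not implemented in this sketch");
  }
}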