Enhancements to the Calcite test framework (#13283)

* Enhancements to the Calcite test framework
* Standardize "Unauthorized" messages
* Additional test framework extension points
* Resolved joinable factory dependency issue
This commit is contained in:
Paul Rogers 2022-11-08 14:28:49 -08:00 committed by GitHub
parent 9f7fd57a69
commit 7e600d2c63
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 476 additions and 191 deletions

View File

@ -41,6 +41,7 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
@ -81,7 +82,8 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
QueryableIndex index =

View File

@ -45,6 +45,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -76,7 +77,8 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
TDigestSketchModule.registerSerde();

View File

@ -57,6 +57,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -91,7 +92,8 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
@SuppressWarnings("resource")
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
HllSketchModule.registerSerde();

View File

@ -58,6 +58,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -101,7 +102,8 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
DoublesSketchModule.registerSerde();

View File

@ -53,6 +53,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -106,7 +107,8 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
SketchModule.registerSerde();

View File

@ -32,6 +32,7 @@ import org.apache.druid.security.basic.BasicSecurityAuthenticationException;
import org.apache.druid.security.basic.BasicSecuritySSLSocketFactory;
import org.apache.druid.security.basic.authentication.LdapUserPrincipal;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthenticationResult;
import javax.annotation.Nullable;
@ -193,7 +194,7 @@ public class LDAPCredentialsValidator implements CredentialsValidator
if (!validatePassword(this.ldapConfig, userDn, password)) {
LOG.debug("Password incorrect for LDAP user %s", username);
throw new BasicSecurityAuthenticationException("User LDAP authentication failed.");
throw new BasicSecurityAuthenticationException(Access.DEFAULT_ERROR_MESSAGE);
}
byte[] salt = BasicAuthUtils.generateSalt();

View File

@ -30,6 +30,7 @@ import org.apache.druid.security.basic.BasicSecurityAuthenticationException;
import org.apache.druid.security.basic.authentication.db.cache.BasicAuthenticatorCacheManager;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorUser;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthenticationResult;
import javax.annotation.Nullable;
@ -83,7 +84,7 @@ public class MetadataStoreCredentialsValidator implements CredentialsValidator
return new AuthenticationResult(username, authorizerName, authenticatorName, null);
} else {
LOG.debug("Password incorrect for metadata store user %s", username);
throw new BasicSecurityAuthenticationException("User metadata store authentication failed.");
throw new BasicSecurityAuthenticationException(Access.DEFAULT_ERROR_MESSAGE);
}
}
}

View File

@ -29,6 +29,7 @@ import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorC
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorUser;
import org.apache.druid.security.basic.authentication.validator.MetadataStoreCredentialsValidator;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthenticationResult;
import org.easymock.EasyMock;
import org.junit.Assert;
@ -143,7 +144,7 @@ public class DBCredentialsValidatorTest
String password = "badpassword";
expectedException.expect(BasicSecurityAuthenticationException.class);
expectedException.expectMessage("User metadata store authentication failed.");
expectedException.expectMessage(Access.DEFAULT_ERROR_MESSAGE);
validator.validateCredentials(authenticatorName, authorizerName, username, password.toCharArray());
}
}

View File

@ -54,6 +54,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -88,7 +89,8 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
InputRowParser parser = new MapInputRowParser(

View File

@ -50,6 +50,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -80,7 +81,8 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
ApproximateHistogramDruidModule.registerSerde();

View File

@ -49,6 +49,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -79,7 +80,8 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
ApproximateHistogramDruidModule.registerSerde();

View File

@ -51,6 +51,7 @@ import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
@ -89,7 +90,8 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
final QueryableIndex index =

View File

@ -37,6 +37,7 @@ import org.apache.druid.java.util.http.client.CredentialedHttpClient;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.auth.BasicCredentials;
import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
@ -307,11 +308,12 @@ public abstract class AbstractAuthConfigurationTest
// as user that can only read auth_test
LOG.info("Checking sys.segments query as datasourceOnlyUser...");
final String expectedMsg = "{\"Access-Check-Result\":\"" + Access.DEFAULT_ERROR_MESSAGE + "\"}";
verifySystemSchemaQueryFailure(
datasourceOnlyUserClient,
SYS_SCHEMA_SEGMENTS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Unauthorized\"}"
expectedMsg
);
LOG.info("Checking sys.servers query as datasourceOnlyUser...");
@ -319,7 +321,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_SERVERS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Unauthorized\"}"
expectedMsg
);
LOG.info("Checking sys.server_segments query as datasourceOnlyUser...");
@ -327,7 +329,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Unauthorized\"}"
expectedMsg
);
LOG.info("Checking sys.tasks query as datasourceOnlyUser...");
@ -335,7 +337,7 @@ public abstract class AbstractAuthConfigurationTest
datasourceOnlyUserClient,
SYS_SCHEMA_TASKS_QUERY,
HttpResponseStatus.FORBIDDEN,
"{\"Access-Check-Result\":\"Unauthorized\"}"
expectedMsg
);
}

View File

@ -25,6 +25,7 @@ import org.apache.druid.java.util.http.client.CredentialedHttpClient;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.auth.BasicCredentials;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.testing.guice.DruidTestModuleFactory;
import org.apache.druid.testing.utils.HttpUtil;
@ -47,13 +48,13 @@ public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest
private static final String BASIC_AUTHENTICATOR = "basic";
private static final String BASIC_AUTHORIZER = "basic";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User metadata store authentication failed.";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: " + Access.DEFAULT_ERROR_MESSAGE;
// This error must match both authorization paths: initial prepare of
// the query, and checks of resources used by a query during execution.
// The two errors are raised in different points in the code, but should
// The two errors are raised in different points in the code, but should
// look identical to users (and tests).
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: " + Access.DEFAULT_ERROR_MESSAGE;
private HttpClient druid99;

View File

@ -28,6 +28,7 @@ import org.apache.druid.java.util.http.client.CredentialedHttpClient;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.auth.BasicCredentials;
import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMapping;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.testing.IntegrationTestingConfig;
import org.apache.druid.testing.guice.DruidTestModuleFactory;
@ -53,8 +54,8 @@ public class ITBasicAuthLdapConfigurationTest extends AbstractAuthConfigurationT
private static final String LDAP_AUTHENTICATOR = "ldap";
private static final String LDAP_AUTHORIZER = "ldapauth";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: User LDAP authentication failed.";
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: Unauthorized";
private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: " + Access.DEFAULT_ERROR_MESSAGE;
private static final String EXPECTED_AVATICA_AUTHZ_ERROR = "Error while executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver error: " + Access.DEFAULT_ERROR_MESSAGE;
@Inject
IntegrationTestingConfig config;

View File

@ -97,8 +97,6 @@ public class JoinDataSource implements DataSource
private static final Logger log = new Logger(JoinDataSource.class);
private final DataSourceAnalysis analysis;
private JoinDataSource(
DataSource left,
DataSource right,

View File

@ -48,7 +48,6 @@ import java.util.Optional;
public class JoinDataSourceTest
{
public static final JoinableFactoryWrapper NOOP_JOINABLE_FACTORY_WRAPPER = new JoinableFactoryWrapper(
NoopJoinableFactory.INSTANCE
);
@ -147,8 +146,7 @@ public class JoinDataSourceTest
{
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Expected [2] children, got [0]");
final DataSource ignored = joinTableToTable.withChildren(Collections.emptyList());
joinTableToTable.withChildren(Collections.emptyList());
}
@Test
@ -197,7 +195,7 @@ public class JoinDataSourceTest
{
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("left filter is only supported if left data source is direct table access");
JoinDataSource ignored = JoinDataSource.create(
JoinDataSource.create(
new QueryDataSource(Mockito.mock(Query.class)),
new TableDataSource("table"),
"j.",
@ -293,7 +291,6 @@ public class JoinDataSourceTest
Assert.assertNotEquals(cacheKey1.length, 0);
Assert.assertNotEquals(cacheKey2.length, 0);
Assert.assertTrue(Arrays.equals(cacheKey1, cacheKey2));
}
@Test
@ -493,7 +490,6 @@ public class JoinDataSourceTest
Assert.assertEquals(cacheKey1.length, 0);
}
private static class JoinableFactoryWithCacheKey extends NoopJoinableFactory
{
@Override

View File

@ -157,7 +157,7 @@ public class QueryLifecycle
try {
preAuthorized(authenticationResult, authorizationResult);
if (!authorizationResult.isAllowed()) {
throw new ISE("Unauthorized");
throw new ISE(Access.DEFAULT_ERROR_MESSAGE);
}
queryResponse = execute();

View File

@ -164,7 +164,7 @@ public class QueryLifecycleTest
public void testRunSimpleUnauthorized()
{
expectedException.expect(ISE.class);
expectedException.expectMessage("Unauthorized");
expectedException.expectMessage(Access.DEFAULT_ERROR_MESSAGE);
EasyMock.expect(queryConfig.getContext()).andReturn(ImmutableMap.of()).anyTimes();
EasyMock.expect(authenticationResult.getIdentity()).andReturn(IDENTITY).anyTimes();

View File

@ -65,6 +65,7 @@ import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.server.log.RequestLogger;
import org.apache.druid.server.log.TestRequestLogger;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.server.security.AuthenticatorMapper;
import org.apache.druid.server.security.AuthorizerMapper;
@ -1567,7 +1568,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
{
final String query = "SELECT * FROM " + CalciteTests.FORBIDDEN_DATASOURCE;
final String expectedError = "Error 2 (00002) : Error while executing SQL \"" +
query + "\": Remote driver error: Unauthorized";
query + "\": Remote driver error: " + Access.DEFAULT_ERROR_MESSAGE;
try (Statement statement = client.createStatement()) {
statement.executeQuery(query);
}

View File

@ -35,6 +35,7 @@ import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.DataSource;
@ -59,6 +60,7 @@ import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.having.DimFilterHavingSpec;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.ordering.StringComparator;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.scan.ScanQuery;
@ -70,6 +72,7 @@ import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.join.JoinType;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.server.security.AuthConfig;
@ -84,14 +87,22 @@ import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.rule.ExtensionCalciteRuleProvider;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaManager;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder;
import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerFixture;
import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier;
import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier;
import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardPlannerComponentSupplier;
import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.sql.http.SqlParameter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -106,6 +117,7 @@ import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@ -120,7 +132,8 @@ import java.util.stream.Collectors;
* A base class for SQL query testing. It sets up query execution environment, provides useful helper methods,
* and populates data using {@link CalciteTests#createMockWalker}.
*/
public class BaseCalciteQueryTest extends CalciteTestBase implements QueryComponentSupplier
public class BaseCalciteQueryTest extends CalciteTestBase
implements QueryComponentSupplier, PlannerComponentSupplier
{
public static String NULL_STRING;
public static Float NULL_FLOAT;
@ -260,6 +273,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
public QueryLogHook queryLogHook;
public QueryComponentSupplier baseComponentSupplier;
public PlannerComponentSupplier basePlannerComponentSupplier = new StandardPlannerComponentSupplier();
public BaseCalciteQueryTest()
{
@ -512,16 +526,17 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
return baseComponentSupplier.createQuerySegmentWalker(conglomerate);
return baseComponentSupplier.createQuerySegmentWalker(conglomerate, joinableFactory);
}
@Override
public SqlEngine createEngine(
QueryLifecycleFactory qlf,
ObjectMapper queryJsonMapper
final QueryLifecycleFactory qlf,
final ObjectMapper queryJsonMapper
)
{
if (engine0 == null) {
@ -531,6 +546,12 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
}
}
@Override
public QueryRunnerFactoryConglomerate createCongolmerate(Builder builder, Closer closer)
{
return baseComponentSupplier.createCongolmerate(builder, closer);
}
@Override
public void configureJsonMapper(ObjectMapper mapper)
{
@ -561,6 +582,42 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
return baseComponentSupplier.getJacksonModules();
}
@Override
public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
{
return baseComponentSupplier.createJoinableFactoryWrapper(lookupProvider);
}
@Override
public Set<ExtensionCalciteRuleProvider> extensionCalciteRules()
{
return basePlannerComponentSupplier.extensionCalciteRules();
}
@Override
public ViewManager createViewManager()
{
return basePlannerComponentSupplier.createViewManager();
}
@Override
public void populateViews(ViewManager viewManager, PlannerFactory plannerFactory)
{
basePlannerComponentSupplier.populateViews(viewManager, plannerFactory);
}
@Override
public DruidSchemaManager createSchemaManager()
{
return basePlannerComponentSupplier.createSchemaManager();
}
@Override
public void finalizePlanner(PlannerFixture plannerFixture)
{
basePlannerComponentSupplier.finalizePlanner(plannerFixture);
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
@ -838,9 +895,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
}
@Override
public SqlStatementFactory statementFactory(PlannerConfig plannerConfig, AuthConfig authConfig)
public PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
return getSqlStatementFactory(plannerConfig, authConfig);
return queryFramework.plannerFixture(BaseCalciteQueryTest.this, plannerConfig, authConfig);
}
@Override
@ -962,7 +1019,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase implements QueryCompon
AuthConfig authConfig
)
{
return queryFramework().statementFactory(plannerConfig, authConfig);
return queryFramework().plannerFixture(this, plannerConfig, authConfig).statementFactory();
}
protected void cannotVectorize()

View File

@ -592,7 +592,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
@Test
@Parameters(source = QueryContextForJoinProvider.class)
public void testFilterAndGroupByLookupUsingJoinOperatorWithValueFilterPushdownMatchesNothig(Map<String, Object> queryContext)
public void testFilterAndGroupByLookupUsingJoinOperatorWithValueFilterPushdownMatchesNothing(Map<String, Object> queryContext)
{
// Cannot vectorize JOIN operator.

View File

@ -59,6 +59,7 @@ import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.nested.NestedDataComplexTypeSerde;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
@ -160,7 +161,8 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
@SuppressWarnings("resource")
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
) throws IOException
{
NestedDataModule.registerHandlersAndSerde();

View File

@ -25,12 +25,13 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest.ResultsVerifier;
import org.apache.druid.sql.calcite.QueryTestRunner.QueryResults;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerFixture;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.rules.ExpectedException;
@ -70,7 +71,7 @@ public class QueryTestBuilder
QueryLogHook queryLogHook();
ExpectedException expectedException();
ObjectMapper jsonMapper();
SqlStatementFactory statementFactory(PlannerConfig plannerConfig, AuthConfig authConfig);
PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig);
}
protected final QueryTestConfig config;
@ -88,6 +89,7 @@ public class QueryTestBuilder
protected boolean skipVectorize;
protected boolean queryCannotVectorize;
protected AuthConfig authConfig = new AuthConfig();
protected PlannerFixture plannerFixture;
public QueryTestBuilder(final QueryTestConfig config)
{
@ -200,13 +202,46 @@ public class QueryTestBuilder
return this;
}
/**
* By default, every test case creates its own planner based on the planner
* and auth config provided. If, however, a test wants to control setup, and
* run multiple test queries against the same setup, use this method to pass
* in the pre-built planner to use. If not set, the standard one is created
* per test.
*/
public QueryTestBuilder plannerFixture(PlannerFixture plannerFixture)
{
this.plannerFixture = plannerFixture;
return this;
}
public QueryTestRunner build()
{
return config.analyze(this);
}
/**
* Internal method to return the cached planner fixture, or create a new one
* based on the planner and auth configs provided. Note: does not cache the
* newly created fixture: doing so would confuse the "please use mine" vs.
* "create a new one each time" semantics.
*/
protected PlannerFixture plannerFixture()
{
if (plannerFixture != null) {
return plannerFixture;
} else {
return config.plannerFixture(plannerConfig, authConfig);
}
}
public void run()
{
build().run();
}
public QueryResults results()
{
return build().resultsOnly();
}
}

View File

@ -177,11 +177,8 @@ public class QueryTestRunner
.sqlParameters(builder.parameters)
.auth(builder.authenticationResult)
.build();
final SqlStatementFactory sqlStatementFactory = builder.config.statementFactory(
builder.plannerConfig,
builder.authConfig
);
PreparedStatement stmt = sqlStatementFactory.preparedStatement(sqlQuery);
final SqlStatementFactory sqlStatementFactory = builder.plannerFixture().statementFactory();
final PreparedStatement stmt = sqlStatementFactory.preparedStatement(sqlQuery);
stmt.prepare();
resourceActions = stmt.allResources();
}
@ -212,10 +209,7 @@ public class QueryTestRunner
BaseCalciteQueryTest.log.info("SQL: %s", builder.sql);
final SqlStatementFactory sqlStatementFactory = builder.config.statementFactory(
builder.plannerConfig,
builder.authConfig
);
final SqlStatementFactory sqlStatementFactory = builder.plannerFixture().statementFactory();
final SqlQueryPlus sqlQuery = SqlQueryPlus.builder(builder.sql)
.sqlParameters(builder.parameters)
.auth(builder.authenticationResult)
@ -227,7 +221,7 @@ public class QueryTestRunner
vectorizeValues.add("force");
}
QueryLogHook queryLogHook = builder.config.queryLogHook();
final QueryLogHook queryLogHook = builder.config.queryLogHook();
for (final String vectorize : vectorizeValues) {
queryLogHook.clearRecordedQueries();
@ -485,4 +479,11 @@ public class QueryTestRunner
verifyStep.verify();
}
}
public QueryResults resultsOnly()
{
ExecuteQuery execStep = (ExecuteQuery) runSteps.get(0);
execStep.run();
return execStep.results().get(0);
}
}

View File

@ -22,18 +22,23 @@ package org.apache.druid.sql.calcite.util;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Provides;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.GlobalTableDataSource;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.lookup.LookupSerdeModule;
import org.apache.druid.query.topn.TopNQueryConfig;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.server.QueryLifecycle;
import org.apache.druid.server.QueryLifecycleFactory;
import org.apache.druid.server.QueryStackTests;
@ -45,12 +50,15 @@ import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.rule.ExtensionCalciteRuleProvider;
import org.apache.druid.sql.calcite.run.NativeSqlEngine;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.schema.DruidSchemaManager;
import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager;
import org.apache.druid.sql.calcite.view.DruidViewMacroFactory;
import org.apache.druid.sql.calcite.view.InProcessViewManager;
import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.timeline.DataSegment;
import java.io.File;
@ -59,6 +67,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Builds the infrastructure needed to run Calcite tests. Building splits into
@ -84,12 +93,19 @@ import java.util.Map;
* extending {@link SqlTestFramework.StandardComponentSupplier StandardComponentSupplier}.
* <p>
* The framework should be built once per test class (not once per test method.)
* Then, per method, call {@link #statementFactory(PlannerConfig, AuthConfig)} to
* Then, for each planner setup, call
* {@link #plannerFixture(PlannerComponentSupplier, PlannerConfig, AuthConfig)}
* to get a {@link PlannerFixture} with a view manager and planner factory. Call
* {@link PlannerFixture#statementFactory()} to
 * obtain the test-specific planner and wrapper classes for that test. After
* that, tests use the various SQL statement classes to run tests. For tests
* based on {@code BaseCalciteQueryTest}, the statements are wrapped by the
* various {@code testQuery()} methods.
* <p>
* For tests that use non-standard views, first create the {@code PlannerFixture},
* populate the views, then use the {@code QueryTestBuilder} directly, passing in
* the {@code PlannerFixture} with views populated.
* <p>
* The framework holds on to the framework components. You can obtain the injector,
* object mapper and other items by calling the various methods. The objects
* are those created by the provided injector, or in this class, using objects
@ -107,8 +123,14 @@ public class SqlTestFramework
*/
public interface QueryComponentSupplier
{
QueryRunnerFactoryConglomerate createCongolmerate(
Builder builder,
Closer closer
);
SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
QueryRunnerFactoryConglomerate conglomerate,
JoinableFactoryWrapper joinableFactory
) throws IOException;
SqlEngine createEngine(
@ -127,6 +149,21 @@ public class SqlTestFramework
void configureJsonMapper(ObjectMapper mapper);
void configureGuice(DruidInjectorBuilder builder);
JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider);
}
public interface PlannerComponentSupplier
{
Set<ExtensionCalciteRuleProvider> extensionCalciteRules();
ViewManager createViewManager();
void populateViews(ViewManager viewManager, PlannerFactory plannerFactory);
DruidSchemaManager createSchemaManager();
void finalizePlanner(PlannerFixture plannerFixture);
}
/**
@ -149,15 +186,37 @@ public class SqlTestFramework
this.temporaryFolder = temporaryFolder;
}
@Override
public QueryRunnerFactoryConglomerate createCongolmerate(
Builder builder,
Closer resourceCloser
)
{
if (builder.mergeBufferCount == 0) {
return QueryStackTests.createQueryRunnerFactoryConglomerate(
resourceCloser,
() -> builder.minTopNThreshold
);
} else {
return QueryStackTests.createQueryRunnerFactoryConglomerate(
resourceCloser,
QueryStackTests.getProcessingConfig(true, builder.mergeBufferCount)
);
}
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
)
{
return TestDataBuilder.createMockWalker(
injector,
conglomerate,
temporaryFolder
temporaryFolder,
QueryStackTests.DEFAULT_NOOP_SCHEDULER,
joinableFactory
);
}
@ -205,6 +264,92 @@ public class SqlTestFramework
public void configureGuice(DruidInjectorBuilder builder)
{
}
@Override
public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
{
return new JoinableFactoryWrapper(
QueryStackTests.makeJoinableFactoryFromDefault(
lookupProvider,
ImmutableSet.of(TestDataBuilder.CUSTOM_ROW_TABLE_JOINABLE),
ImmutableMap.of(TestDataBuilder.CUSTOM_ROW_TABLE_JOINABLE.getClass(), GlobalTableDataSource.class)
)
);
}
}
public static class StandardPlannerComponentSupplier implements PlannerComponentSupplier
{
@Override
public Set<ExtensionCalciteRuleProvider> extensionCalciteRules()
{
return ImmutableSet.of();
}
@Override
public ViewManager createViewManager()
{
return new InProcessViewManager(DRUID_VIEW_MACRO_FACTORY);
}
@Override
public void populateViews(ViewManager viewManager, PlannerFactory plannerFactory)
{
viewManager.createView(
plannerFactory,
"aview",
"SELECT SUBSTRING(dim1, 1, 1) AS dim1_firstchar FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"bview",
"SELECT COUNT(*) FROM druid.foo\n"
+ "WHERE __time >= CURRENT_TIMESTAMP + INTERVAL '1' DAY AND __time < TIMESTAMP '2002-01-01 00:00:00'"
);
viewManager.createView(
plannerFactory,
"cview",
"SELECT SUBSTRING(bar.dim1, 1, 1) AS dim1_firstchar, bar.dim2 as dim2, dnf.l2 as l2\n"
+ "FROM (SELECT * from foo WHERE dim2 = 'a') as bar INNER JOIN druid.numfoo dnf ON bar.dim2 = dnf.dim2"
);
viewManager.createView(
plannerFactory,
"dview",
"SELECT SUBSTRING(dim1, 1, 1) AS numfoo FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"forbiddenView",
"SELECT __time, SUBSTRING(dim1, 1, 1) AS dim1_firstchar, dim2 FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"restrictedView",
"SELECT __time, dim1, dim2, m1 FROM druid.forbiddenDatasource WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"invalidView",
"SELECT __time, dim1, dim2, m1 FROM druid.invalidDatasource WHERE dim2 = 'a'"
);
}
@Override
public DruidSchemaManager createSchemaManager()
{
return new NoopDruidSchemaManager();
}
@Override
public void finalizePlanner(PlannerFixture plannerFixture)
{
}
}
/**
@ -240,8 +385,151 @@ public class SqlTestFramework
}
}
/**
* Builds the statement factory, which also builds all the infrastructure
* behind the factory by calling methods on this test class. As a result, each
* factory is specific to one test and one planner config. This method can be
* overridden to control the objects passed to the factory.
*/
public static class PlannerFixture
{
private final ViewManager viewManager;
private final PlannerFactory plannerFactory;
private final SqlStatementFactory statementFactory;
public PlannerFixture(
final SqlTestFramework framework,
final PlannerComponentSupplier componentSupplier,
final PlannerConfig plannerConfig,
final AuthConfig authConfig
)
{
this.viewManager = componentSupplier.createViewManager();
final DruidSchemaCatalog rootSchema = QueryFrameworkUtils.createMockRootSchema(
framework.injector,
framework.conglomerate(),
framework.walker(),
plannerConfig,
viewManager,
componentSupplier.createSchemaManager(),
framework.authorizerMapper
);
this.plannerFactory = new PlannerFactory(
rootSchema,
framework.operatorTable(),
framework.macroTable(),
plannerConfig,
framework.authorizerMapper,
framework.queryJsonMapper(),
CalciteTests.DRUID_SCHEMA_NAME,
new CalciteRulesManager(componentSupplier.extensionCalciteRules()),
framework.injector.getInstance(JoinableFactoryWrapper.class)
);
componentSupplier.finalizePlanner(this);
this.statementFactory = QueryFrameworkUtils.createSqlStatementFactory(
framework.engine,
plannerFactory,
authConfig
);
componentSupplier.populateViews(viewManager, plannerFactory);
}
public ViewManager viewManager()
{
return viewManager;
}
public PlannerFactory plannerFactory()
{
return plannerFactory;
}
public SqlStatementFactory statementFactory()
{
return statementFactory;
}
}
/**
* Guice module to create the various query framework items. By creating items within
* a module, later items can depend on those created earlier by grabbing them from the
* injector. This avoids the race condition that otherwise occurs if we try to build
 * some of the items directly in code, while others depend on the injector.
* <p>
* To allow customization, the instances are created via provider methods that pull
* dependencies from Guice, then call the component provider to create the instance.
* Tests customize the instances by overriding the instance creation methods.
* <p>
* This is an intermediate solution: the ultimate solution is to create things
* in Guice itself.
*/
private class TestSetupModule implements DruidModule
{
private final Builder builder;
public TestSetupModule(Builder builder)
{
this.builder = builder;
}
@Override
public void configure(Binder binder)
{
binder.bind(DruidOperatorTable.class).toInstance(componentSupplier.createOperatorTable());
binder.bind(ExprMacroTable.class).toInstance(componentSupplier.createMacroTable());
binder.bind(DataSegment.PruneSpecsHolder.class).toInstance(DataSegment.PruneSpecsHolder.DEFAULT);
}
@Override
public List<? extends Module> getJacksonModules()
{
return Lists.newArrayList(componentSupplier.getJacksonModules());
}
@Provides
public QueryRunnerFactoryConglomerate conglomerate()
{
return componentSupplier.createCongolmerate(builder, resourceCloser);
}
@Provides
public JoinableFactoryWrapper joinableFactoryWrapper(final Injector injector)
{
return builder.componentSupplier.createJoinableFactoryWrapper(
injector.getInstance(LookupExtractorFactoryContainerProvider.class)
);
}
@Provides
public SpecificSegmentsQuerySegmentWalker segmentsQuerySegmentWalker(final Injector injector)
{
try {
SpecificSegmentsQuerySegmentWalker walker = componentSupplier.createQuerySegmentWalker(
injector.getInstance(QueryRunnerFactoryConglomerate.class),
injector.getInstance(JoinableFactoryWrapper.class)
);
resourceCloser.register(walker);
return walker;
}
catch (IOException e) {
throw new RE(e);
}
}
@Provides
public QueryLifecycleFactory queryLifecycleFactory(final Injector injector)
{
return QueryFrameworkUtils.createMockQueryLifecycleFactory(
injector.getInstance(SpecificSegmentsQuerySegmentWalker.class),
injector.getInstance(QueryRunnerFactoryConglomerate.class)
);
}
}
public static final DruidViewMacroFactory DRUID_VIEW_MACRO_FACTORY = new TestDruidViewMacroFactory();
private final QueryComponentSupplier componentSupplier;
private final Closer resourceCloser = Closer.create();
private final Injector injector;
private final AuthorizerMapper authorizerMapper = CalciteTests.TEST_AUTHORIZER_MAPPER;
@ -249,63 +537,12 @@ public class SqlTestFramework
private SqlTestFramework(Builder builder)
{
this.injector = buildInjector(builder, resourceCloser);
this.componentSupplier = builder.componentSupplier;
this.injector = new CalciteTestInjectorBuilder()
.addModule(new TestSetupModule(builder))
.build();
this.engine = builder.componentSupplier.createEngine(queryLifecycleFactory(), queryJsonMapper());
builder.componentSupplier.configureJsonMapper(queryJsonMapper());
}
public Injector buildInjector(Builder builder, Closer resourceCloser)
{
CalciteTestInjectorBuilder injectorBuilder = new CalciteTestInjectorBuilder();
final QueryRunnerFactoryConglomerate conglomerate;
if (builder.mergeBufferCount == 0) {
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(
resourceCloser,
() -> builder.minTopNThreshold
);
} else {
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(
resourceCloser,
QueryStackTests.getProcessingConfig(true, builder.mergeBufferCount)
);
}
final SpecificSegmentsQuerySegmentWalker walker;
try {
walker = builder.componentSupplier.createQuerySegmentWalker(conglomerate);
}
catch (IOException e) {
throw new RE(e);
}
this.resourceCloser.register(walker);
final QueryLifecycleFactory qlf = QueryFrameworkUtils.createMockQueryLifecycleFactory(walker, conglomerate);
final DruidOperatorTable operatorTable = builder.componentSupplier.createOperatorTable();
final ExprMacroTable macroTable = builder.componentSupplier.createMacroTable();
injectorBuilder.addModule(new DruidModule()
{
@Override
public void configure(Binder binder)
{
binder.bind(QueryRunnerFactoryConglomerate.class).toInstance(conglomerate);
binder.bind(SpecificSegmentsQuerySegmentWalker.class).toInstance(walker);
binder.bind(QueryLifecycleFactory.class).toInstance(qlf);
binder.bind(DruidOperatorTable.class).toInstance(operatorTable);
binder.bind(ExprMacroTable.class).toInstance(macroTable);
binder.bind(DataSegment.PruneSpecsHolder.class).toInstance(DataSegment.PruneSpecsHolder.DEFAULT);
}
@Override
public List<? extends Module> getJacksonModules()
{
return Lists.newArrayList(builder.componentSupplier.getJacksonModules());
}
});
return injectorBuilder.build();
componentSupplier.configureJsonMapper(queryJsonMapper());
}
public ObjectMapper queryJsonMapper()
@ -344,88 +581,21 @@ public class SqlTestFramework
}
/**
* Build the statement factory, which also builds all the infrastructure
* behind the factory by calling methods on this test class. As a result, each
* factory is specific to one test and one planner config. This method can be
* overridden to control the objects passed to the factory.
* Creates an object (a "fixture") to hold the planner factory, view manager
* and related items. Most tests need just the statement factory. View-related
* tests also use the view manager. The fixture builds the infrastructure
* behind the factory by calling methods on the {@link QueryComponentSupplier}
 * interface. Calcite tests implement that interface, so the components can be
 * customized by overriding its methods in a particular test. As a result, each
* planner fixture is specific to one test and one planner config.
*/
public SqlStatementFactory statementFactory(
public PlannerFixture plannerFixture(
PlannerComponentSupplier componentSupplier,
PlannerConfig plannerConfig,
AuthConfig authConfig
)
{
final InProcessViewManager viewManager = new InProcessViewManager(DRUID_VIEW_MACRO_FACTORY);
DruidSchemaCatalog rootSchema = QueryFrameworkUtils.createMockRootSchema(
injector,
conglomerate(),
walker(),
plannerConfig,
viewManager,
new NoopDruidSchemaManager(),
authorizerMapper
);
final PlannerFactory plannerFactory = new PlannerFactory(
rootSchema,
operatorTable(),
macroTable(),
plannerConfig,
authorizerMapper,
queryJsonMapper(),
CalciteTests.DRUID_SCHEMA_NAME,
new CalciteRulesManager(ImmutableSet.of()),
CalciteTests.createJoinableFactoryWrapper()
);
final SqlStatementFactory sqlStatementFactory = QueryFrameworkUtils.createSqlStatementFactory(
engine,
plannerFactory,
authConfig
);
viewManager.createView(
plannerFactory,
"aview",
"SELECT SUBSTRING(dim1, 1, 1) AS dim1_firstchar FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"bview",
"SELECT COUNT(*) FROM druid.foo\n"
+ "WHERE __time >= CURRENT_TIMESTAMP + INTERVAL '1' DAY AND __time < TIMESTAMP '2002-01-01 00:00:00'"
);
viewManager.createView(
plannerFactory,
"cview",
"SELECT SUBSTRING(bar.dim1, 1, 1) AS dim1_firstchar, bar.dim2 as dim2, dnf.l2 as l2\n"
+ "FROM (SELECT * from foo WHERE dim2 = 'a') as bar INNER JOIN druid.numfoo dnf ON bar.dim2 = dnf.dim2"
);
viewManager.createView(
plannerFactory,
"dview",
"SELECT SUBSTRING(dim1, 1, 1) AS numfoo FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"forbiddenView",
"SELECT __time, SUBSTRING(dim1, 1, 1) AS dim1_firstchar, dim2 FROM foo WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"restrictedView",
"SELECT __time, dim1, dim2, m1 FROM druid.forbiddenDatasource WHERE dim2 = 'a'"
);
viewManager.createView(
plannerFactory,
"invalidView",
"SELECT __time, dim1, dim2, m1 FROM druid.invalidDatasource WHERE dim2 = 'a'"
);
return sqlStatementFactory;
return new PlannerFixture(this, componentSupplier, plannerConfig, authConfig);
}
public void close()