mirror of https://github.com/apache/druid.git
Update Calcite*Test to use junit5 (#16106)
* Update Calcite*Test to use junit5
* change the way temp dirs are handled
* add openrewrite workflow to safeguard upgrade
* replace JUnitParamsRunner with standard junit5 parameterized tests
* update a few rules to junit5 api
* lots of boring changes
* cleanup QueryLogHook
* cleanup
* fix compile error: ARRAYS_DATASOURCE
* fix test
* remove enclosed
* empty
  +TEST:TDigestSketchSqlAggregatorTest,HllSketchSqlAggregatorTest,DoublesSketchSqlAggregatorTest,ThetaSketchSqlAggregatorTest,ArrayOfDoublesSketchSqlAggregatorTest,BloomFilterSqlAggregatorTest,BloomDimFilterSqlTest,CatalogIngestionTest,CatalogQueryTest,FixedBucketsHistogramQuantileSqlAggregatorTest,QuantileSqlAggregatorTest,MSQArraysTest,MSQDataSketchesTest,MSQExportTest,MSQFaultsTest,MSQInsertTest,MSQLoadedSegmentTests,MSQParseExceptionsTest,MSQReplaceTest,MSQSelectTest,InsertLockPreemptedFaultTest,MSQWarningsTest,SqlMSQStatementResourcePostTest,SqlStatementResourceTest,CalciteSelectJoinQueryMSQTest,CalciteSelectQueryMSQTest,CalciteUnionQueryMSQTest,MSQTestBase,VarianceSqlAggregatorTest,SleepSqlTest,SqlRowTransformerTest,DruidAvaticaHandlerTest,DruidStatementTest,BaseCalciteQueryTest,CalciteArraysQueryTest,CalciteCorrelatedQueryTest,CalciteExplainQueryTest,CalciteExportTest,CalciteIngestionDmlTest,CalciteInsertDmlTest,CalciteJoinQueryTest,CalciteLookupFunctionQueryTest,CalciteMultiValueStringQueryTest,CalciteNestedDataQueryTest,CalciteParameterQueryTest,CalciteQueryTest,CalciteReplaceDmlTest,CalciteScanSignatureTest,CalciteSelectQueryTest,CalciteSimpleQueryTest,CalciteSubqueryTest,CalciteSysQueryTest,CalciteTableAppendTest,CalciteTimeBoundaryQueryTest,CalciteUnionQueryTest,CalciteWindowQueryTest,DecoupledPlanningCalciteJoinQueryTest,DecoupledPlanningCalciteQueryTest,DecoupledPlanningCalciteUnionQueryTest,DrillWindowQueryTest,DruidPlannerResourceAnalyzeTest,IngestTableFunctionTest,QueryTestRunner,SqlTestFrameworkConfig,SqlAggregationModuleTest,ExpressionsTest,GreatestExpressionTest,IPv4AddressMatchExpressionTest,IPv4AddressParseExpressionTest,IPv4AddressStringifyExpressionTest,LeastExpressionTest,TimeFormatOperatorConversionTest,CombineAndSimplifyBoundsTest,FiltrationTest,SqlQueryTest,CalcitePlannerModuleTest,CalcitesTest,DruidCalciteSchemaModuleTest,DruidSchemaNoDataInitTest,InformationSchemaTest,NamedDruidSchemaTest,NamedLookupSchemaTest,NamedSystemSchemaTest,RootSchemaProviderTest,SystemSchemaTest,CalciteTestBase,SqlResourceTest
* use @Nested
* add rule to remove enclosed; upgrade surefire
* remove enclosed
* cleanup
* add comment about surefire exclude
This commit is contained in:
parent a151bcfd12
commit 0a42342cef
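The diff below applies the usual JUnit 4 -> JUnit 5 (Jupiter) mapping: org.junit.Test becomes org.junit.jupiter.api.Test, @Before/@After become @BeforeEach/@AfterEach, @Ignore becomes @Disabled, @Test(expected = ...) becomes Assertions.assertThrows, rules give way to extensions or test-framework helpers, and the Enclosed/Parameterized runners are replaced by @Nested and @ParameterizedTest. A compact before/after sketch of that mapping (class and method names are illustrative, not taken from this commit):

// Before: JUnit 4
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

public class ExampleLegacyTest
{
  @Before
  public void setUp() {}

  @Ignore("slow")
  @Test(expected = NumberFormatException.class)
  public void testParse()
  {
    Long.parseLong("not a number");
  }
}

// After: JUnit 5
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertThrows;

public class ExampleJupiterTest
{
  @BeforeEach
  public void setUp() {}

  @Disabled("slow")
  @Test
  public void testParse()
  {
    assertThrows(NumberFormatException.class, () -> Long.parseLong("not a number"));
  }
}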
@@ -34,7 +34,7 @@ env:
   MVN: mvn -B
   MAVEN_SKIP: -P skip-static-checks -Dweb.console.skip=true -Dmaven.javadoc.skip=true
   MAVEN_SKIP_TESTS: -P skip-tests
-  MAVEN_OPTS: -Xmx3000m
+  MAVEN_OPTS: -Xmx8g

 jobs:
   static-checks:
@@ -144,6 +144,28 @@ jobs:
           --levels ERROR \
           --scope JavaInspectionsScope

+  openrewrite:
+    runs-on: ubuntu-latest
+    steps:
+      - name: checkout branch
+        uses: actions/checkout@v4
+
+      - uses: actions/setup-java@v4
+        with:
+          distribution: 'zulu'
+          java-version: '8'
+          cache: 'maven'
+
+      - name: maven install
+        run: |
+          echo 'Running Maven install...' &&
+          ${MVN} clean install -q -ff -pl '!distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C &&
+          ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
+
+      - name: rewrite:dryRun
+        run: |
+          ${MVN} rewrite:dryRun ${MAVEN_SKIP}
+
   web-checks:
     strategy:
       fail-fast: false
@ -63,6 +63,36 @@
|
|||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.druid</groupId>
|
||||
<artifactId>druid-processing</artifactId>
|
||||
|
@ -91,11 +121,6 @@
|
|||
<type>test-jar</type>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.hamcrest</groupId>
|
||||
<artifactId>java-hamcrest</artifactId>
|
||||
|
|
|
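The junit-vintage-engine dependency added above keeps not-yet-migrated JUnit 4 test classes running on the JUnit 5 platform, so modules can be converted incrementally. For instance, a legacy class like this hypothetical one is still discovered and executed through the vintage engine:

import org.junit.Assert;
import org.junit.Test; // JUnit 4 API, executed via junit-vintage-engine

public class LegacyStillRunsTest
{
  @Test
  public void testAddition()
  {
    Assert.assertEquals(4, 2 + 2);
  }
}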
@ -19,11 +19,17 @@
|
|||
|
||||
package org.apache.druid.compressedbigdecimal;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
|
||||
public class CompressedBigDecimalMaxSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
|
||||
{
|
||||
private static final String FUNCTION_NAME = CompressedBigDecimalMaxSqlAggregator.NAME;
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithNumberParseHelper(
|
||||
|
@ -34,15 +40,19 @@ public class CompressedBigDecimalMaxSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithStrictNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalMaxAggregatorFactory::new
|
||||
);
|
||||
assertThrows(NumberFormatException.class, () -> {
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalMaxAggregatorFactory::new
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScaleHelper(
|
||||
|
@ -53,6 +63,7 @@ public class CompressedBigDecimalMaxSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultScaleHelper(
|
||||
|
@ -63,6 +74,7 @@ public class CompressedBigDecimalMaxSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultSizeAndScaleHelper(
|
||||
|
|
|
@ -19,11 +19,16 @@
|
|||
|
||||
package org.apache.druid.compressedbigdecimal;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.Assert.assertThrows;
|
||||
|
||||
public class CompressedBigDecimalMinSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
|
||||
{
|
||||
private static final String FUNCTION_NAME = CompressedBigDecimalMinSqlAggregator.NAME;
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithNumberParseHelper(
|
||||
|
@ -34,15 +39,19 @@ public class CompressedBigDecimalMinSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithStrictNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalMinAggregatorFactory::new
|
||||
);
|
||||
assertThrows(NumberFormatException.class, () -> {
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalMinAggregatorFactory::new
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScaleHelper(
|
||||
|
@ -53,6 +62,7 @@ public class CompressedBigDecimalMinSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultScaleHelper(
|
||||
|
@ -63,6 +73,7 @@ public class CompressedBigDecimalMinSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultSizeAndScaleHelper(
|
||||
|
|
|
@@ -48,9 +48,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
 import org.apache.druid.sql.calcite.util.TestDataBuilder;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.LinearShardSpec;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

-import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -80,11 +79,11 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
       final QueryRunnerFactoryConglomerate conglomerate,
       final JoinableFactoryWrapper joinableFactory,
       final Injector injector
-  ) throws IOException
+  )
   {
     QueryableIndex index =
         IndexBuilder.create()
-                    .tmpDir(temporaryFolder.newFolder())
+                    .tmpDir(newTempFolder())
                     .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
                     .schema(
                         new IncrementalIndexSchema.Builder()
@@ -120,7 +119,8 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
   @Test
   public abstract void testCompressedBigDecimalAggWithNumberParse();

-  @Test(expected = NumberFormatException.class)
+  // expected: NumberFormatException.class
+  @Test
   public abstract void testCompressedBigDecimalAggWithStrictNumberParse();

   @Test
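Because the abstract test methods above can no longer carry @Test(expected = NumberFormatException.class), the Max/Min/Sum subclasses now assert the exception at the call site with assertThrows. A minimal, self-contained illustration of that translation (the class below is illustrative; the real subclasses wrap testCompressedBigDecimalAggWithStrictNumberParseHelper the same way):

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertThrows;

public class StrictParseExampleTest
{
  @Test
  public void testStrictNumberParse()
  {
    // JUnit 4 declared the failure on the annotation:
    //   @Test(expected = NumberFormatException.class)
    // JUnit 5 asserts it explicitly around the failing call.
    assertThrows(NumberFormatException.class, () -> new java.math.BigDecimal("notANumber"));
  }
}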
@ -19,11 +19,16 @@
|
|||
|
||||
package org.apache.druid.compressedbigdecimal;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.Assert.assertThrows;
|
||||
|
||||
public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
|
||||
{
|
||||
private static final String FUNCTION_NAME = CompressedBigDecimalSumSqlAggregator.NAME;
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithNumberParseHelper(
|
||||
|
@ -34,15 +39,19 @@ public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggWithStrictNumberParse()
|
||||
{
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalSumAggregatorFactory::new
|
||||
);
|
||||
assertThrows(NumberFormatException.class, () -> {
|
||||
testCompressedBigDecimalAggWithStrictNumberParseHelper(
|
||||
FUNCTION_NAME,
|
||||
CompressedBigDecimalSumAggregatorFactory::new
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultNumberParseAndCustomSizeAndScaleHelper(
|
||||
|
@ -53,6 +62,7 @@ public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultScaleHelper(
|
||||
|
@ -63,6 +73,7 @@ public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecim
|
|||
}
|
||||
|
||||
@Override
|
||||
@Test
|
||||
public void testCompressedBigDecimalAggDefaultSizeAndScale()
|
||||
{
|
||||
testCompressedBigDecimalAggDefaultSizeAndScaleHelper(
|
||||
|
|
|
@ -139,6 +139,31 @@
|
|||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
|
|
|
@ -53,9 +53,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
||||
|
@ -72,13 +71,13 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
TDigestSketchModule.registerSerde();
|
||||
|
||||
final QueryableIndex index =
|
||||
IndexBuilder.create(CalciteTests.getJsonMapper())
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@ -150,6 +150,31 @@
|
|||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>joda-time</groupId>
|
||||
<artifactId>joda-time</artifactId>
|
||||
|
|
|
@ -86,9 +86,8 @@ import org.apache.druid.timeline.partition.LinearShardSpec;
|
|||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.Period;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -257,12 +256,12 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
HllSketchModule.registerSerde();
|
||||
final QueryableIndex index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@ -62,9 +62,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
@ -84,13 +83,13 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
DoublesSketchModule.registerSerde();
|
||||
|
||||
final QueryableIndex index =
|
||||
IndexBuilder.create(CalciteTests.getJsonMapper())
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@ -70,9 +70,8 @@ import org.apache.druid.timeline.partition.LinearShardSpec;
|
|||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.Period;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@ -111,12 +110,12 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
SketchModule.registerSerde();
|
||||
|
||||
final QueryableIndex index = IndexBuilder.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@ -52,9 +52,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -111,12 +110,12 @@ public class ArrayOfDoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
ArrayOfDoublesSketchModule.registerSerde();
|
||||
|
||||
final QueryableIndex index = IndexBuilder.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(
|
||||
OffHeapMemorySegmentWriteOutMediumFactory.instance()
|
||||
)
|
||||
|
|
|
@ -109,6 +109,36 @@
|
|||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.druid</groupId>
|
||||
<artifactId>druid-processing</artifactId>
|
||||
|
@ -130,11 +160,6 @@
|
|||
<scope>test</scope>
|
||||
<type>test-jar</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
|
|
|
@ -54,9 +54,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
|
||||
|
@ -77,11 +76,11 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
final QueryableIndex index =
|
||||
IndexBuilder.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@@ -39,8 +39,8 @@ import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
 import org.apache.druid.sql.calcite.filtration.Filtration;
 import org.apache.druid.sql.calcite.util.CalciteTests;
 import org.apache.druid.sql.http.SqlParameter;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;

 import java.io.IOException;

@@ -219,7 +219,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
     );
   }

-  @Ignore("this test is really slow and is intended to use for comparisons with testBloomFilterBigParameter")
+  @Disabled("this test is really slow and is intended to use for comparisons with testBloomFilterBigParameter")
   @Test
   public void testBloomFilterBigNoParam() throws IOException
   {
@@ -247,7 +247,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
     );
   }

-  @Ignore("this test is for comparison with testBloomFilterBigNoParam")
+  @Disabled("this test is for comparison with testBloomFilterBigNoParam")
   @Test
   public void testBloomFilterBigParameter() throws IOException
   {
@ -148,13 +148,38 @@
|
|||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.druid.catalog.storage.CatalogTests;
|
|||
import org.apache.druid.catalog.sync.CachedMetadataCatalog;
|
||||
import org.apache.druid.catalog.sync.MetadataCatalog;
|
||||
import org.apache.druid.java.util.common.granularity.Granularities;
|
||||
import org.apache.druid.metadata.TestDerbyConnector;
|
||||
import org.apache.druid.metadata.TestDerbyConnector.DerbyConnectorRule5;
|
||||
import org.apache.druid.segment.column.ColumnType;
|
||||
import org.apache.druid.segment.column.RowSignature;
|
||||
import org.apache.druid.sql.calcite.CalciteIngestionDmlTest;
|
||||
|
@ -36,8 +36,8 @@ import org.apache.druid.sql.calcite.filtration.Filtration;
|
|||
import org.apache.druid.sql.calcite.planner.CatalogResolver;
|
||||
import org.apache.druid.sql.calcite.util.CalciteTests;
|
||||
import org.apache.druid.sql.calcite.util.SqlTestFramework;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
|
@ -48,9 +48,8 @@ import static org.junit.Assert.fail;
|
|||
*/
|
||||
public class CatalogIngestionTest extends CalciteIngestionDmlTest
|
||||
{
|
||||
@ClassRule
|
||||
public static final TestDerbyConnector.DerbyConnectorRule DERBY_CONNECTION_RULE =
|
||||
new TestDerbyConnector.DerbyConnectorRule();
|
||||
@RegisterExtension
|
||||
public static final DerbyConnectorRule5 DERBY_CONNECTION_RULE = new DerbyConnectorRule5();
|
||||
|
||||
/**
|
||||
* Signature for the foo datasource after applying catalog metadata.
|
||||
|
|
|
@ -27,14 +27,14 @@ import org.apache.druid.catalog.storage.CatalogStorage;
|
|||
import org.apache.druid.catalog.storage.CatalogTests;
|
||||
import org.apache.druid.catalog.sync.CachedMetadataCatalog;
|
||||
import org.apache.druid.catalog.sync.MetadataCatalog;
|
||||
import org.apache.druid.metadata.TestDerbyConnector;
|
||||
import org.apache.druid.metadata.TestDerbyConnector.DerbyConnectorRule5;
|
||||
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
|
||||
import org.apache.druid.sql.calcite.SqlSchema;
|
||||
import org.apache.druid.sql.calcite.planner.CatalogResolver;
|
||||
import org.apache.druid.sql.calcite.util.SqlTestFramework;
|
||||
import org.junit.After;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
@ -43,8 +43,8 @@ import static org.junit.Assert.fail;
|
|||
|
||||
public class CatalogQueryTest extends BaseCalciteQueryTest
|
||||
{
|
||||
@Rule
|
||||
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
|
||||
@RegisterExtension
|
||||
public static final DerbyConnectorRule5 DERBY_CONNECTION_RULE = new DerbyConnectorRule5();
|
||||
|
||||
private CatalogTests.DbFixture dbFixture;
|
||||
private CatalogStorage storage;
|
||||
|
@ -70,7 +70,7 @@ public class CatalogQueryTest extends BaseCalciteQueryTest
|
|||
.run();
|
||||
}
|
||||
|
||||
@After
|
||||
@AfterEach
|
||||
public void catalogTearDown()
|
||||
{
|
||||
CatalogTests.tearDown(dbFixture);
|
||||
|
@ -79,7 +79,7 @@ public class CatalogQueryTest extends BaseCalciteQueryTest
|
|||
@Override
|
||||
public CatalogResolver createCatalogResolver()
|
||||
{
|
||||
dbFixture = new CatalogTests.DbFixture(derbyConnectorRule);
|
||||
dbFixture = new CatalogTests.DbFixture(DERBY_CONNECTION_RULE);
|
||||
storage = dbFixture.storage;
|
||||
MetadataCatalog catalog = new CachedMetadataCatalog(
|
||||
storage,
|
||||
|
|
|
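CatalogIngestionTest and CatalogQueryTest above swap the JUnit 4 @ClassRule/@Rule for TestDerbyConnector.DerbyConnectorRule5 registered through @RegisterExtension. A minimal sketch of how such a JUnit 5 extension can be structured, assuming only class-level setup and teardown are needed (the extension and test names here are illustrative, not Druid's actual implementation):

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.RegisterExtension;

// Illustrative stand-in for an extension in the style of DerbyConnectorRule5.
class InMemoryDbExtension implements BeforeAllCallback, AfterAllCallback
{
  @Override
  public void beforeAll(ExtensionContext context)
  {
    // open the connector / create the schema here
  }

  @Override
  public void afterAll(ExtensionContext context)
  {
    // tear the database down here
  }
}

class CatalogStyleExampleTest
{
  // A static field gives class-level lifecycle, the JUnit 5 analogue of @ClassRule;
  // an instance field would behave like a per-test @Rule.
  @RegisterExtension
  static final InMemoryDbExtension DB = new InMemoryDbExtension();

  @Test
  void usesTheDatabase()
  {
  }
}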
@@ -53,10 +53,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
 import org.junit.rules.ExpectedException;
 import org.junit.rules.TemporaryFolder;
-import org.junit.runner.RunWith;

 import java.io.BufferedWriter;
 import java.io.File;
@@ -76,7 +74,6 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;

-@RunWith(Enclosed.class)
 public class HdfsInputSourceTest extends InitializedNullHandlingTest
 {
   private static final String PATH = "hdfs://localhost:7020/foo/bar";
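HdfsInputSourceTest above (and the JDBC lookup tests further down) drop @RunWith(Enclosed.class); the commit message's "use @Nested" bullet refers to expressing the same grouping with JUnit 5 @Nested inner classes, roughly like this (illustrative names):

import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

// JUnit 4's Enclosed runner executed public static inner classes as separate suites;
// JUnit 5 expresses the same grouping with non-static @Nested inner classes.
public class GroupedExampleTest
{
  @Nested
  class ConstructorTests
  {
    @Test
    void rejectsNullPath()
    {
    }
  }

  @Nested
  class ReaderTests
  {
    @Test
    void readsSingleSplit()
    {
    }
  }
}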
@ -93,6 +93,36 @@
|
|||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.druid</groupId>
|
||||
<artifactId>druid-processing</artifactId>
|
||||
|
@ -114,11 +144,6 @@
|
|||
<scope>test</scope>
|
||||
<type>test-jar</type>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
|
|
|
@ -56,9 +56,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest
|
||||
|
@ -75,12 +74,12 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
ApproximateHistogramDruidModule.registerSerde();
|
||||
|
||||
final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@ -55,9 +55,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
|
|||
import org.apache.druid.sql.calcite.util.TestDataBuilder;
|
||||
import org.apache.druid.timeline.DataSegment;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
|
||||
|
@ -74,12 +73,12 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
|
|||
final QueryRunnerFactoryConglomerate conglomerate,
|
||||
final JoinableFactoryWrapper joinableFactory,
|
||||
final Injector injector
|
||||
) throws IOException
|
||||
)
|
||||
{
|
||||
ApproximateHistogramDruidModule.registerSerde();
|
||||
|
||||
final QueryableIndex index = IndexBuilder.create(CalciteTests.getJsonMapper())
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.tmpDir(newTempFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
|
|
|
@@ -25,13 +25,10 @@ import org.apache.druid.server.initialization.JdbcAccessSecurityConfig;
 import org.joda.time.Period;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;

 import java.util.Set;

-@RunWith(Enclosed.class)
 public class JdbcExtractionNamespaceUrlCheckTest
 {
   private static final String TABLE_NAME = "abstractDbRenameTest";
@@ -35,9 +35,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
 import org.skife.jdbi.v2.Handle;

 import java.io.IOException;
@@ -45,7 +43,6 @@ import java.io.UncheckedIOException;
 import java.util.Collections;
 import java.util.Map;

-@RunWith(Enclosed.class)
 public class JdbcDataFetcherTest extends InitializedNullHandlingTest
 {
   private static final String TABLE_NAME = "tableName";
@@ -24,13 +24,10 @@ import org.apache.druid.metadata.MetadataStorageConnectorConfig;
 import org.apache.druid.server.initialization.JdbcAccessSecurityConfig;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
 import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;

 import java.util.Set;

-@RunWith(Enclosed.class)
 public class JdbcDataFetcherUrlCheckTest
 {
   private static final String TABLE_NAME = "tableName";
@ -203,11 +203,31 @@
|
|||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
|
@ -224,8 +244,13 @@
|
|||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
|
|
|
@ -49,11 +49,10 @@ import org.apache.druid.sql.calcite.planner.ColumnMappings;
|
|||
import org.apache.druid.timeline.SegmentId;
|
||||
import org.apache.druid.utils.CompressionUtils;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.internal.matchers.ThrowableMessageMatcher;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.MethodSource;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
@ -71,14 +70,12 @@ import java.util.Map;
|
|||
/**
|
||||
* Tests INSERT and SELECT behaviour of MSQ with arrays and MVDs
|
||||
*/
|
||||
@RunWith(Parameterized.class)
|
||||
public class MSQArraysTest extends MSQTestBase
|
||||
{
|
||||
private String dataFileNameJsonString;
|
||||
private String dataFileSignatureJsonString;
|
||||
private DataSource dataFileExternalDataSource;
|
||||
|
||||
@Parameterized.Parameters(name = "{index}:with context {0}")
|
||||
public static Collection<Object[]> data()
|
||||
{
|
||||
Object[][] data = new Object[][]{
|
||||
|
@ -90,17 +87,11 @@ public class MSQArraysTest extends MSQTestBase
|
|||
return Arrays.asList(data);
|
||||
}
|
||||
|
||||
@Parameterized.Parameter(0)
|
||||
public String contextName;
|
||||
|
||||
@Parameterized.Parameter(1)
|
||||
public Map<String, Object> context;
|
||||
|
||||
@Before
|
||||
@BeforeEach
|
||||
public void setup() throws IOException
|
||||
{
|
||||
// Read the file and make the name available to the tests
|
||||
File dataFile = temporaryFolder.newFile();
|
||||
File dataFile = newTempFile("dataFile");
|
||||
final InputStream resourceStream = NestedDataTestUtils.class.getClassLoader()
|
||||
.getResourceAsStream(NestedDataTestUtils.ARRAY_TYPES_DATA_FILE);
|
||||
final InputStream decompressing = CompressionUtils.decompress(
|
||||
|
@ -134,8 +125,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testInsertStringArrayWithArrayIngestModeNone()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testInsertStringArrayWithArrayIngestModeNone(String contextName, Map<String, Object> context)
|
||||
{
|
||||
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
|
@ -156,8 +148,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testReplaceMvdWithStringArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testReplaceMvdWithStringArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "array");
|
||||
|
@ -182,8 +175,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testReplaceStringArrayWithMvdInArrayMode()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testReplaceStringArrayWithMvdInArrayMode(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "array");
|
||||
|
@ -209,8 +203,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testReplaceStringArrayWithMvdInMvdMode()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testReplaceStringArrayWithMvdInMvdMode(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "mvd");
|
||||
|
@ -236,8 +231,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testReplaceMvdWithStringArraySkipValidation()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testReplaceMvdWithStringArraySkipValidation(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "array");
|
||||
|
@ -284,8 +280,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to none (default) and the user tries to ingest
|
||||
* string arrays
|
||||
*/
|
||||
@Test
|
||||
public void testReplaceMvdWithMvd()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testReplaceMvdWithMvd(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "array");
|
||||
|
@ -322,8 +319,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to mvd (default) and the only array type to be
|
||||
* ingested is string array
|
||||
*/
|
||||
@Test
|
||||
public void testInsertOnFoo1WithMultiValueToArrayGroupByWithDefaultContext()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testInsertOnFoo1WithMultiValueToArrayGroupByWithDefaultContext(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("__time", ColumnType.LONG)
|
||||
|
@ -343,8 +341,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
/**
|
||||
* Tests the INSERT query when 'auto' type is set
|
||||
*/
|
||||
@Test
|
||||
public void testInsertArraysAutoType()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testInsertArraysAutoType(String contextName, Map<String, Object> context)
|
||||
{
|
||||
List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{1672531200000L, null, null, null},
|
||||
|
@ -396,8 +395,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to mvd and the user tries to ingest numeric array
|
||||
* types as well
|
||||
*/
|
||||
@Test
|
||||
public void testInsertArraysWithStringArraysAsMVDs()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testInsertArraysWithStringArraysAsMVDs(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final Map<String, Object> adjustedContext = new HashMap<>(context);
|
||||
adjustedContext.put(MultiStageQueryContext.CTX_ARRAY_INGEST_MODE, "mvd");
|
||||
|
@ -430,8 +430,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
* Tests the behaviour of INSERT query when arrayIngestMode is set to array and the user tries to ingest all
|
||||
* array types
|
||||
*/
|
||||
@Test
|
||||
public void testInsertArraysAsArrays()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testInsertArraysAsArrays(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{
|
||||
|
@ -602,27 +603,30 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnArraysWithArrayIngestModeAsNone()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnArraysWithArrayIngestModeAsNone(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectOnArrays("none");
|
||||
testSelectOnArrays(contextName, context, "none");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnArraysWithArrayIngestModeAsMVD()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnArraysWithArrayIngestModeAsMVD(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectOnArrays("mvd");
|
||||
testSelectOnArrays(contextName, context, "mvd");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnArraysWithArrayIngestModeAsArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnArraysWithArrayIngestModeAsArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectOnArrays("array");
|
||||
testSelectOnArrays(contextName, context, "array");
|
||||
}
|
||||
|
||||
// Tests the behaviour of the select with the given arrayIngestMode. The expectation should be the same, since the
|
||||
// arrayIngestMode should only determine how the array gets ingested at the end.
|
||||
public void testSelectOnArrays(String arrayIngestMode)
|
||||
public void testSelectOnArrays(String contextName, Map<String, Object> context, String arrayIngestMode)
|
||||
{
|
||||
final List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{
|
||||
|
@ -839,8 +843,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanWithOrderByOnStringArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanWithOrderByOnStringArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{Arrays.asList("d", "e")},
|
||||
|
@ -902,8 +907,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanWithOrderByOnLongArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanWithOrderByOnLongArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{null},
|
||||
|
@ -964,8 +970,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanWithOrderByOnDoubleArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanWithOrderByOnDoubleArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Arrays.asList(
|
||||
new Object[]{null},
|
||||
|
@ -1026,8 +1033,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanExternBooleanArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanExternBooleanArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Collections.singletonList(
|
||||
new Object[]{Arrays.asList(1L, 0L, null)}
|
||||
|
@ -1073,8 +1081,9 @@ public class MSQArraysTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanExternArrayWithNonConvertibleType()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanExternArrayWithNonConvertibleType(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final List<Object[]> expectedRows = Collections.singletonList(
|
||||
new Object[]{Arrays.asList(null, null)}
|
||||
|
|
|
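MSQArraysTest above (like MSQInsertTest below) replaces the JUnit 4 Parameterized runner: the @Parameterized.Parameter fields are gone and every test method now receives the context name and context map as arguments supplied by the @MethodSource provider. A condensed sketch of the shape, with illustrative context values:

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertNotNull;

public class ContextParameterizedExampleTest
{
  // Provider referenced by name from @MethodSource; each Object[] becomes one invocation's arguments.
  public static Collection<Object[]> data()
  {
    return Arrays.asList(new Object[][]{
        {"default", Collections.emptyMap()},
        {"durable_storage", Collections.singletonMap("durableShuffleStorage", true)}
    });
  }

  @MethodSource("data")
  @ParameterizedTest(name = "{index}:with context {0}")
  public void testWithContext(String contextName, Map<String, Object> context)
  {
    assertNotNull(contextName);
    assertNotNull(context);
  }
}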
@@ -36,7 +36,7 @@ import org.apache.druid.sql.calcite.filtration.Filtration;
 import org.apache.druid.sql.calcite.planner.ColumnMapping;
 import org.apache.druid.sql.calcite.planner.ColumnMappings;
 import org.apache.druid.sql.calcite.util.CalciteTests;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 /**
  * Tests of MSQ with functions from the "druid-datasketches" extension.
@@ -27,7 +27,7 @@ import org.apache.druid.msq.util.MultiStageQueryContext;
 import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.column.RowSignature;
 import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;

 import java.io.BufferedReader;
 import java.io.File;
@@ -106,14 +106,14 @@ public class MSQExportTest extends MSQTestBase
   }

   @Test
-  public void testNumberOfRowsPerFile() throws IOException
+  public void testNumberOfRowsPerFile()
   {
     RowSignature rowSignature = RowSignature.builder()
                                             .add("__time", ColumnType.LONG)
                                             .add("dim1", ColumnType.STRING)
                                             .add("cnt", ColumnType.LONG).build();

-    File exportDir = temporaryFolder.newFolder("export/");
+    File exportDir = newTempFolder("export");

     Map<String, Object> queryContext = new HashMap<>(DEFAULT_MSQ_CONTEXT);
     queryContext.put(MultiStageQueryContext.CTX_ROWS_PER_PAGE, 1);
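Throughout the diff, temporaryFolder.newFolder()/newFile() calls on the JUnit 4 TemporaryFolder rule are replaced by newTempFolder()/newTempFile() helpers provided by the test base classes (the "change the way temp dirs are handled" bullet in the commit message). A minimal sketch of how such helpers could be backed by JUnit 5's @TempDir; the helper names follow the diff, but the implementation shown is an assumption, not Druid's actual code:

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.junit.jupiter.api.io.TempDir;

// Sketch of a base-class replacement for the JUnit 4 TemporaryFolder rule.
public abstract class TempDirTestBase
{
  @TempDir
  protected Path tempBaseDir; // injected for each test by JUnit 5

  protected File newTempFolder()
  {
    return newTempFolder("junit");
  }

  protected File newTempFolder(String prefix)
  {
    try {
      return Files.createTempDirectory(tempBaseDir, prefix).toFile();
    }
    catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  protected File newTempFile(String prefix)
  {
    try {
      return Files.createTempFile(tempBaseDir, prefix, null).toFile();
    }
    catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }
}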
@ -19,6 +19,7 @@
|
|||
|
||||
package org.apache.druid.msq.exec;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.apache.druid.error.DruidException;
|
||||
|
@ -40,7 +41,6 @@ import org.apache.druid.msq.indexing.error.TooManyColumnsFault;
|
|||
import org.apache.druid.msq.indexing.error.TooManyInputFilesFault;
|
||||
import org.apache.druid.msq.indexing.error.TooManyPartitionsFault;
|
||||
import org.apache.druid.msq.test.MSQTestBase;
|
||||
import org.apache.druid.msq.test.MSQTestFileUtils;
|
||||
import org.apache.druid.msq.test.MSQTestTaskActionClient;
|
||||
import org.apache.druid.segment.column.ColumnType;
|
||||
import org.apache.druid.segment.column.RowSignature;
|
||||
|
@ -49,13 +49,16 @@ import org.apache.druid.timeline.DataSegment;
|
|||
import org.apache.druid.timeline.partition.DimensionRangeShardSpec;
|
||||
import org.apache.druid.timeline.partition.LinearShardSpec;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.junit.Test;
|
||||
import org.junit.internal.matchers.ThrowableMessageMatcher;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentMatchers;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
@ -291,7 +294,7 @@ public class MSQFaultsTest extends MSQTestBase
|
|||
.add("__time", ColumnType.LONG)
|
||||
.build();
|
||||
|
||||
File file = MSQTestFileUtils.generateTemporaryNdJsonFile(temporaryFolder, 30000, 1);
|
||||
File file = createNdJsonFile(newTempFile("ndjson30k"), 30000, 1);
|
||||
String filePathAsJson = queryFramework().queryJsonMapper().writeValueAsString(file.getAbsolutePath());
|
||||
|
||||
testIngestQuery().setSql(" insert into foo1 SELECT\n"
|
||||
|
@ -311,6 +314,27 @@ public class MSQFaultsTest extends MSQTestBase
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method that populates a file with {@code numRows} rows and {@code numColumns} columns where the
|
||||
* first column is a string 'timestamp' while the rest are string columns with junk value
|
||||
*/
|
||||
public static File createNdJsonFile(File file, final int numRows, final int numColumns) throws IOException
|
||||
{
|
||||
for (int currentRow = 0; currentRow < numRows; ++currentRow) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("{");
|
||||
sb.append("\"timestamp\":\"2016-06-27T00:00:11.080Z\"");
|
||||
for (int currentColumn = 1; currentColumn < numColumns; ++currentColumn) {
|
||||
sb.append(StringUtils.format(",\"column%s\":\"val%s\"", currentColumn, currentRow));
|
||||
}
|
||||
sb.append("}");
|
||||
Files.write(file.toPath(), ImmutableList.of(sb.toString()), StandardCharsets.UTF_8, StandardOpenOption.APPEND);
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testInsertWithManyColumns()
|
||||
{
|
||||
|
@ -399,7 +423,7 @@ public class MSQFaultsTest extends MSQTestBase
|
|||
|
||||
final int numFiles = 20000;
|
||||
|
||||
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
|
||||
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
|
||||
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
|
||||
|
||||
String externalFiles = String.join(", ", Collections.nCopies(numFiles, toReadFileNameAsJson));
|
||||
|
@@ -38,7 +38,6 @@ import org.apache.druid.msq.indexing.error.RowTooLargeFault;
import org.apache.druid.msq.kernel.WorkerAssignmentStrategy;
import org.apache.druid.msq.test.CounterSnapshotMatcher;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.msq.util.MultiStageQueryContext;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;

@@ -48,10 +47,9 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.timeline.SegmentId;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.Mockito;

import java.io.File;

@@ -67,7 +65,6 @@ import java.util.Set;
import java.util.TreeSet;

@RunWith(Parameterized.class)
public class MSQInsertTest extends MSQTestBase
{

@@ -82,7 +79,6 @@ public class MSQInsertTest extends MSQTestBase
.build();
private final HashFunction fn = Hashing.murmur3_128();

@Parameterized.Parameters(name = "{index}:with context {0}")
public static Collection<Object[]> data()
{
Object[][] data = new Object[][]{

@@ -94,15 +90,9 @@ public class MSQInsertTest extends MSQTestBase
};
return Arrays.asList(data);
}

@Parameterized.Parameter(0)
public String contextName;

@Parameterized.Parameter(1)
public Map<String, Object> context;

@Test
public void testInsertOnFoo1()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();
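The hunks above and below all apply the same mechanical conversion: the JUnit 4 @RunWith(Parameterized.class) runner and @Parameterized.Parameter fields are dropped, and each test becomes a JUnit 5 @ParameterizedTest fed by the static data() provider through @MethodSource. A minimal, self-contained sketch of that pattern is shown here for orientation; the class name and the example context values are illustrative, not taken from the commit.

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

class ContextParameterizationSketch
{
  // Static provider replaces @Parameterized.Parameters; each Object[] becomes one invocation's arguments.
  static Collection<Object[]> data()
  {
    return Arrays.asList(new Object[][]{
        {"default", Collections.emptyMap()},
        {"durable_storage", Collections.singletonMap("durableShuffleStorage", true)}
    });
  }

  // Arguments arrive as method parameters instead of @Parameterized.Parameter instance fields.
  @MethodSource("data")
  @ParameterizedTest(name = "{index}:with context {0}")
  void runsOncePerContext(String contextName, Map<String, Object> context)
  {
    // a real test body would use contextName and context here
  }
}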
@@ -154,8 +144,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWithExistingTimeColumn() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithExistingTimeColumn(String contextName, Map<String, Object> context) throws IOException
{
List<Object[]> expectedRows = ImmutableList.of(
new Object[] {1678897351000L, "A"},

@@ -168,9 +159,7 @@ public class MSQInsertTest extends MSQTestBase
.add("flags", ColumnType.STRING)
.build();

final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this,
"/dataset-with-time-column.json"
);
final File toRead = getResourceAsTemporaryFile("/dataset-with-time-column.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

testIngestQuery().setSql(" INSERT INTO foo1 SELECT\n"

@@ -193,8 +182,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWithUnnestInline()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithUnnestInline(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = ImmutableList.of(
new Object[]{1692226800000L, 1L},

@@ -218,8 +208,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWithUnnest()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithUnnest(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = ImmutableList.of(
new Object[]{946684800000L, "a"},

@@ -248,8 +239,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWithUnnestWithVirtualColumns()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithUnnestWithVirtualColumns(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = ImmutableList.of(
new Object[]{946684800000L, 1.0f},

@@ -282,10 +274,11 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnExternalDataSource() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnExternalDataSource(String contextName, Map<String, Object> context) throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -347,8 +340,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithGroupByLimitWithoutClusterBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithGroupByLimitWithoutClusterBy(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();

@@ -395,8 +389,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithTwoCountAggregatorsWithRollupContext()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTwoCountAggregatorsWithRollupContext(String contextName, Map<String, Object> context)
{
final List<Object[]> expectedRows = expectedFooRows();

@@ -434,8 +429,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithGroupByLimitWithClusterBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithGroupByLimitWithClusterBy(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();

@@ -485,8 +481,10 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();

}
@Test
public void testInsertOnFoo1WithTimeFunction()

@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTimeFunction(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -504,8 +502,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithTimeAggregator()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTimeAggregator(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -534,8 +533,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithTimeAggregatorAndMultipleWorkers()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTimeAggregatorAndMultipleWorkers(String contextName, Map<String, Object> context)
{
Map<String, Object> localContext = new HashMap<>(context);
localContext.put(MultiStageQueryContext.CTX_TASK_ASSIGNMENT_STRATEGY, WorkerAssignmentStrategy.MAX.name());

@@ -568,8 +568,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithTimePostAggregator()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTimePostAggregator(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -599,8 +600,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithTimeFunctionWithSequential()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithTimeFunctionWithSequential(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();
@@ -610,7 +612,7 @@ public class MSQInsertTest extends MSQTestBase
.add("__time", ColumnType.LONG)
.add("dim1", ColumnType.STRING)
.add("cnt", ColumnType.LONG).build();
Map<String, Object> context = ImmutableMap.<String, Object>builder()
Map<String, Object> newContext = ImmutableMap.<String, Object>builder()
.putAll(DEFAULT_MSQ_CONTEXT)
.put(
MultiStageQueryContext.CTX_CLUSTER_STATISTICS_MERGE_MODE,

@@ -620,10 +622,10 @@ public class MSQInsertTest extends MSQTestBase
testIngestQuery().setSql(
"insert into foo1 select floor(__time to day) as __time , dim1 , count(*) as cnt from foo where dim1 is not null group by 1, 2 PARTITIONED by day clustered by dim1")
.setQueryContext(context)
.setQueryContext(newContext)
.setExpectedDataSource("foo1")
.setExpectedRowSignature(rowSignature)
.setQueryContext(MSQInsertTest.this.context)
.setQueryContext(context)
.setExpectedSegment(expectedFooSegments())
.setExpectedResultRows(expectedRows)
.setExpectedCountersForStageWorkerChannel(

@@ -660,8 +662,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithMultiValueDim()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithMultiValueDim(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -677,8 +680,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1MultiValueDimWithLimitWithoutClusterBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1MultiValueDimWithLimitWithoutClusterBy(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -694,8 +698,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1MultiValueDimWithLimitWithClusterBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1MultiValueDimWithLimitWithClusterBy(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -711,8 +716,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithMultiValueDimGroupBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithMultiValueDimGroupBy(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -728,8 +734,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithMultiValueMeasureGroupBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithMultiValueMeasureGroupBy(String contextName, Map<String, Object> context)
{
testIngestQuery().setSql(
"INSERT INTO foo1 SELECT count(dim3) FROM foo WHERE dim3 IS NOT NULL GROUP BY 1 PARTITIONED BY ALL TIME")

@@ -742,9 +749,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertOnFoo1WithAutoTypeArrayGroupBy()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithAutoTypeArrayGroupBy(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -777,8 +784,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArray()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsArray(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -812,8 +820,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsMvd()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithArrayIngestModeArrayGroupByInsertAsMvd(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -847,8 +856,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOnFoo1WithMultiValueDimGroupByWithoutGroupByEnable(String contextName, Map<String, Object> context)
{
Map<String, Object> localContext = ImmutableMap.<String, Object>builder()
.putAll(context)

@@ -868,8 +878,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyExecutionError();
}

@Test
public void testRollUpOnFoo1UpOnFoo1()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnFoo1UpOnFoo1(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();

@@ -925,8 +936,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testRollUpOnFoo1WithTimeFunction()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnFoo1WithTimeFunction(String contextName, Map<String, Object> context)
{
List<Object[]> expectedRows = expectedFooRows();
int expectedCounterRows = expectedRows.size();

@@ -982,8 +994,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWithClusteredByDescendingThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithClusteredByDescendingThrowsException(String contextName, Map<String, Object> context)
{
// Add a DESC clustered by column, which should not be allowed
testIngestQuery().setSql("INSERT INTO foo1 "

@@ -999,8 +1012,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testRollUpOnFoo1WithTimeFunctionComplexCol()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnFoo1WithTimeFunctionComplexCol(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1025,8 +1039,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testRollUpOnFoo1ComplexCol()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnFoo1ComplexCol(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1048,10 +1063,11 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testRollUpOnExternalDataSource() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnExternalDataSource(String contextName, Map<String, Object> context) throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()
@@ -1115,10 +1131,11 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test()
public void testRollUpOnExternalDataSourceWithCompositeKey() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testRollUpOnExternalDataSourceWithCompositeKey(String contextName, Map<String, Object> context) throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -1191,8 +1208,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertWrongTypeTimestamp()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWrongTypeTimestamp(String contextName, Map<String, Object> context)
{
final RowSignature rowSignature =
RowSignature.builder()

@@ -1220,8 +1238,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testIncorrectInsertQuery()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testIncorrectInsertQuery(String contextName, Map<String, Object> context)
{
testIngestQuery()
.setSql(

@@ -1234,8 +1253,9 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testInsertRestrictedColumns()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertRestrictedColumns(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1262,8 +1282,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testInsertDuplicateColumnNames()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertDuplicateColumnNames(String contextName, Map<String, Object> context)
{
testIngestQuery()
.setSql(" insert into foo1 SELECT\n"

@@ -1284,8 +1305,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testInsertQueryWithInvalidSubtaskCount()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertQueryWithInvalidSubtaskCount(String contextName, Map<String, Object> context)
{
Map<String, Object> localContext = new HashMap<>(context);
localContext.put(MultiStageQueryContext.CTX_MAX_NUM_TASKS, 1);

@@ -1306,10 +1328,11 @@ public class MSQInsertTest extends MSQTestBase
.verifyExecutionError();
}

@Test
public void testInsertWithTooLargeRowShouldThrowException() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertWithTooLargeRowShouldThrowException(String contextName, Map<String, Object> context) throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

Mockito.doReturn(500).when(workerMemoryParameters).getLargeFrameSize();

@@ -1335,8 +1358,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyExecutionError();
}

@Test
public void testInsertLimitWithPeriodGranularityThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertLimitWithPeriodGranularityThrowsException(String contextName, Map<String, Object> context)
{
testIngestQuery().setSql(" INSERT INTO foo "
+ "SELECT __time, m1 "

@@ -1352,8 +1376,9 @@ public class MSQInsertTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testInsertOffsetThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testInsertOffsetThrowsException(String contextName, Map<String, Object> context)
{
testIngestQuery().setSql(" INSERT INTO foo "
+ "SELECT __time, m1 "

@@ -1368,20 +1393,21 @@ public class MSQInsertTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testCorrectNumberOfWorkersUsedAutoModeWithoutBytesLimit() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testCorrectNumberOfWorkersUsedAutoModeWithoutBytesLimit(String contextName, Map<String, Object> context) throws IOException
{
Map<String, Object> localContext = new HashMap<>(context);
localContext.put(MultiStageQueryContext.CTX_TASK_ASSIGNMENT_STRATEGY, WorkerAssignmentStrategy.AUTO.name());
localContext.put(MultiStageQueryContext.CTX_MAX_NUM_TASKS, 4);

final File toRead1 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-1.json");
final File toRead1 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-1.json");
final String toReadFileNameAsJson1 = queryFramework().queryJsonMapper().writeValueAsString(toRead1.getAbsolutePath());

final File toRead2 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-2.json");
final File toRead2 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-2.json");
final String toReadFileNameAsJson2 = queryFramework().queryJsonMapper().writeValueAsString(toRead2.getAbsolutePath());

final File toRead3 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-3.json");
final File toRead3 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-3.json");
final String toReadFileNameAsJson3 = queryFramework().queryJsonMapper().writeValueAsString(toRead3.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -1418,21 +1444,22 @@ public class MSQInsertTest extends MSQTestBase
}

@Test
public void testCorrectNumberOfWorkersUsedAutoModeWithBytesLimit() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testCorrectNumberOfWorkersUsedAutoModeWithBytesLimit(String contextName, Map<String, Object> context) throws IOException
{
Map<String, Object> localContext = new HashMap<>(context);
localContext.put(MultiStageQueryContext.CTX_TASK_ASSIGNMENT_STRATEGY, WorkerAssignmentStrategy.AUTO.name());
localContext.put(MultiStageQueryContext.CTX_MAX_NUM_TASKS, 4);
localContext.put(MultiStageQueryContext.CTX_MAX_INPUT_BYTES_PER_WORKER, 10);

final File toRead1 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-1.json");
final File toRead1 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-1.json");
final String toReadFileNameAsJson1 = queryFramework().queryJsonMapper().writeValueAsString(toRead1.getAbsolutePath());

final File toRead2 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-2.json");
final File toRead2 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-2.json");
final String toReadFileNameAsJson2 = queryFramework().queryJsonMapper().writeValueAsString(toRead2.getAbsolutePath());

final File toRead3 = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/multipleFiles/wikipedia-sampled-3.json");
final File toRead3 = getResourceAsTemporaryFile("/multipleFiles/wikipedia-sampled-3.json");
final String toReadFileNameAsJson3 = queryFramework().queryJsonMapper().writeValueAsString(toRead3.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -1468,9 +1495,11 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyInsertQuery()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyInsertQuery(String contextName, Map<String, Object> context)
{
// Insert with a condition which results in 0 rows being inserted -- do nothing.
testIngestQuery().setSql(
"INSERT INTO foo1 "

@@ -1484,9 +1513,11 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyInsertQueryWithAllGranularity()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyInsertQueryWithAllGranularity(String contextName, Map<String, Object> context)
{
// Insert with a condition which results in 0 rows being inserted -- do nothing.
testIngestQuery().setSql(
"INSERT INTO foo1 "

@@ -1500,9 +1531,11 @@ public class MSQInsertTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyInsertLimitQuery()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyInsertLimitQuery(String contextName, Map<String, Object> context)
{
// Insert with a condition which results in 0 rows being inserted -- do nothing.
testIngestQuery().setSql(
"INSERT INTO foo1 "
@@ -51,8 +51,8 @@ import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Map;

@@ -87,7 +87,7 @@ public class MSQLoadedSegmentTests extends MSQTestBase
2
);

@Before
@BeforeEach
public void setUp()
{
loadedSegmentsMetadata.add(new ImmutableSegmentLoadInfo(LOADED_SEGMENT_1, ImmutableSet.of(DATA_SERVER_1)));
@@ -32,7 +32,6 @@ import org.apache.druid.msq.indexing.error.CannotParseExternalDataFault;
import org.apache.druid.msq.indexing.error.InvalidNullByteFault;
import org.apache.druid.msq.querykit.scan.ExternalColumnSelectorFactory;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.scan.ScanQuery;

@@ -44,7 +43,7 @@ import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.ColumnMapping;
import org.apache.druid.sql.calcite.planner.ColumnMappings;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;

@@ -53,15 +52,11 @@ import java.util.Map;

public class MSQParseExceptionsTest extends MSQTestBase
{

@Test
public void testIngestWithNullByte() throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(
temporaryFolder,
this,
"/unparseable-null-byte-string.csv"
);
final File toRead = getResourceAsTemporaryFile("/unparseable-null-byte-string.csv");
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -145,11 +140,7 @@ public class MSQParseExceptionsTest extends MSQTestBase
@Test
public void testIngestWithSanitizedNullByte() throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(
temporaryFolder,
this,
"/unparseable-null-byte-string.csv"
);
final File toRead = getResourceAsTemporaryFile("/unparseable-null-byte-string.csv");
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

@@ -243,11 +234,7 @@ public class MSQParseExceptionsTest extends MSQTestBase
@Test
public void testMultiValueStringWithIncorrectType() throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(
temporaryFolder,
this,
"/unparseable-mv-string-array.json"
);
final File toRead = getResourceAsTemporaryFile("/unparseable-mv-string-array.json");
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()
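Every MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, ...) call in these files collapses to a one-argument getResourceAsTemporaryFile(...) inherited from the test base class, whose implementation is not part of this excerpt. A plausible shape for such a helper, assuming a JUnit 5 @TempDir-backed base class (the names and body below are an assumption, shown only to illustrate the direction of the change):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

import org.junit.jupiter.api.io.TempDir;

abstract class TempDirResourceSketch
{
  // JUnit 5 injects a fresh temporary directory per test, replacing the JUnit 4 TemporaryFolder rule.
  @TempDir
  protected File tempDir;

  // Copies a classpath resource into the temp dir so callers only need to pass the resource path.
  protected File getResourceAsTemporaryFile(String resource) throws IOException
  {
    final File target = new File(tempDir, new File(resource).getName());
    try (InputStream in = getClass().getResourceAsStream(resource)) {
      Files.copy(in, target.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
    return target;
  }
}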
@@ -31,7 +31,6 @@ import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.msq.test.CounterSnapshotMatcher;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.msq.test.MSQTestTaskActionClient;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;

@@ -40,13 +39,13 @@ import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.partition.DimensionRangeShardSpec;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

import javax.annotation.Nonnull;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;

@@ -58,7 +57,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

@RunWith(Parameterized.class)
public class MSQReplaceTest extends MSQTestBase
{

@@ -72,7 +70,6 @@ public class MSQReplaceTest extends MSQTestBase
)
.build();

@Parameterized.Parameters(name = "{index}:with context {0}")
public static Collection<Object[]> data()
{
Object[][] data = new Object[][]{

@@ -84,15 +81,9 @@ public class MSQReplaceTest extends MSQTestBase
};
return Arrays.asList(data);
}

@Parameterized.Parameter(0)
public String contextName;

@Parameterized.Parameter(1)
public Map<String, Object> context;

@Test
public void testReplaceOnFooWithAll()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFooWithAll(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -172,8 +163,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceOnFooWithWhere()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFooWithWhere(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -221,14 +213,15 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceOnFoo1WithAllExtern() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFoo1WithAllExtern(String contextName, Map<String, Object> context) throws IOException
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)
.add("cnt", ColumnType.LONG).build();

final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

testIngestQuery().setSql(" REPLACE INTO foo1 OVERWRITE ALL SELECT "

@@ -296,14 +289,15 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceOnFoo1WithWhereExtern() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFoo1WithWhereExtern(String contextName, Map<String, Object> context) throws IOException
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)
.add("user", ColumnType.STRING).build();

final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

testIngestQuery().setSql(

@@ -362,8 +356,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceIncorrectSyntax()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceIncorrectSyntax(String contextName, Map<String, Object> context)
{
testIngestQuery()
.setSql("REPLACE INTO foo1 OVERWRITE SELECT * FROM foo PARTITIONED BY ALL TIME")

@@ -376,8 +371,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testReplaceSegmentEntireTable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceSegmentEntireTable(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -433,8 +429,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceSegmentsRepartitionTable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceSegmentsRepartitionTable(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -516,8 +513,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceWithWhereClause()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceWithWhereClause(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -589,8 +587,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceWhereClauseLargerThanData()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceWhereClauseLargerThanData(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -665,8 +664,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceLimitWithPeriodGranularityThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceLimitWithPeriodGranularityThrowsException(String contextName, Map<String, Object> context)
{
testIngestQuery().setSql(" REPLACE INTO foo "
+ "OVERWRITE ALL "

@@ -681,8 +681,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testReplaceOffsetThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOffsetThrowsException(String contextName, Map<String, Object> context)
{
testIngestQuery().setSql(" REPLACE INTO foo "
+ "OVERWRITE ALL "

@@ -698,8 +699,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testReplaceTimeChunks()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceTimeChunks(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -745,8 +747,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceTimeChunksLargerThanData()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceTimeChunksLargerThanData(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -800,8 +803,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceAllOverEternitySegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceAllOverEternitySegment(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)
@@ -852,8 +856,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceOnFoo1Range()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFoo1Range(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -875,8 +880,9 @@ public class MSQReplaceTest extends MSQTestBase
}

@Test
public void testReplaceOnFoo1RangeClusteredBySubset()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceOnFoo1RangeClusteredBySubset(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -913,8 +919,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceSegmentsInsertIntoNewTable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceSegmentsInsertIntoNewTable(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -944,8 +951,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceWithClusteredByDescendingThrowsException()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceWithClusteredByDescendingThrowsException(String contextName, Map<String, Object> context)
{
// Add a DESC clustered by column, which should not be allowed
testIngestQuery().setSql(" REPLACE INTO foobar "

@@ -961,8 +969,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyPlanningErrors();
}

@Test
public void testReplaceUnnestSegmentEntireTable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceUnnestSegmentEntireTable(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1020,8 +1029,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceUnnestWithVirtualColumnSegmentEntireTable()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceUnnestWithVirtualColumnSegmentEntireTable(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1083,8 +1093,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceUnnestSegmentWithTimeFilter()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceUnnestSegmentWithTimeFilter(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1157,8 +1168,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testReplaceTombstonesOverPartiallyOverlappingSegments()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testReplaceTombstonesOverPartiallyOverlappingSegments(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
.add("__time", ColumnType.LONG)

@@ -1223,8 +1235,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceAll()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAll(String contextName, Map<String, Object> context)
{
// An empty replace all with no used segment should effectively be the same as an empty insert
testIngestQuery().setSql(

@@ -1241,8 +1254,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceInterval()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceInterval(String contextName, Map<String, Object> context)
{
// An empty replace interval with no used segment should effectively be the same as an empty insert
testIngestQuery().setSql(

@@ -1259,8 +1273,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceAllOverExistingSegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAllOverExistingSegment(String contextName, Map<String, Object> context)
{
Interval existingSegmentInterval = Intervals.of("2001-01-01T/2001-01-02T");
DataSegment existingDataSegment = DataSegment.builder()

@@ -1290,8 +1305,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceIntervalOverPartiallyOverlappingSegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceIntervalOverPartiallyOverlappingSegment(String contextName, Map<String, Object> context)
{
// Create a data segment which lies partially outside the generated segment
DataSegment existingDataSegment = DataSegment.builder()

@@ -1325,8 +1341,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceIntervalOverPartiallyOverlappingStart()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceIntervalOverPartiallyOverlappingStart(String contextName, Map<String, Object> context)
{
// Create a data segment whose start partially lies outside the query's replace interval
DataSegment existingDataSegment = DataSegment.builder()

@@ -1362,8 +1379,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceIntervalOverPartiallyOverlappingEnd()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceIntervalOverPartiallyOverlappingEnd(String contextName, Map<String, Object> context)
{
// Create a data segment whose end partially lies outside the query's replace interval
DataSegment existingDataSegment = DataSegment.builder()

@@ -1399,8 +1417,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceAllOverEternitySegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAllOverEternitySegment(String contextName, Map<String, Object> context)
{
// Create a data segment spanning eternity
DataSegment existingDataSegment = DataSegment.builder()

@@ -1432,8 +1451,9 @@ public class MSQReplaceTest extends MSQTestBase
}

@Test
public void testEmptyReplaceAllWithAllGrainOverFiniteIntervalSegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAllWithAllGrainOverFiniteIntervalSegment(String contextName, Map<String, Object> context)
{
// Create a finite-interval segment
DataSegment existingDataSegment = DataSegment.builder()

@@ -1463,8 +1483,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceAllWithAllGrainOverEternitySegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAllWithAllGrainOverEternitySegment(String contextName, Map<String, Object> context)
{
// Create a segment spanning eternity
DataSegment existingDataSegment = DataSegment.builder()

@@ -1495,8 +1516,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceAllWithAllGrainOverHalfEternitySegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceAllWithAllGrainOverHalfEternitySegment(String contextName, Map<String, Object> context)
{
// Create a segment spanning half-eternity
DataSegment existingDataSegment = DataSegment.builder()

@@ -1526,8 +1548,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceLimitQuery()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceLimitQuery(String contextName, Map<String, Object> context)
{
// A limit query which results in 0 rows being inserted -- do nothing.
testIngestQuery().setSql(

@@ -1544,8 +1567,9 @@ public class MSQReplaceTest extends MSQTestBase
.verifyResults();
}

@Test
public void testEmptyReplaceIntervalOverEternitySegment()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testEmptyReplaceIntervalOverEternitySegment(String contextName, Map<String, Object> context)
{
// Create a data segment spanning eternity
DataSegment existingDataSegment = DataSegment.builder()
@@ -44,7 +44,6 @@ import org.apache.druid.msq.indexing.report.MSQResultsReport;
import org.apache.druid.msq.querykit.common.SortMergeJoinFrameProcessorFactory;
import org.apache.druid.msq.test.CounterSnapshotMatcher;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.msq.util.MultiStageQueryContext;
import org.apache.druid.query.InlineDataSource;
import org.apache.druid.query.LookupDataSource;

@@ -81,10 +80,9 @@ import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

@@ -98,7 +96,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

@RunWith(Parameterized.class)
public class MSQSelectTest extends MSQTestBase
{

@@ -126,7 +123,6 @@ public class MSQSelectTest extends MSQTestBase
)
.build();

@Parameterized.Parameters(name = "{index}:with context {0}")
public static Collection<Object[]> data()
{
Object[][] data = new Object[][]{

@@ -140,15 +136,9 @@ public class MSQSelectTest extends MSQTestBase
return Arrays.asList(data);
}

@Parameterized.Parameter(0)
public String contextName;

@Parameterized.Parameter(1)
public Map<String, Object> context;

@Test
public void testCalculator()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testCalculator(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("EXPR$0", ColumnType.LONG)

@@ -173,7 +163,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -183,8 +173,9 @@ public class MSQSelectTest extends MSQTestBase
.setExpectedResultRows(ImmutableList.of(new Object[]{2})).verifyResults();
}

@Test
public void testSelectOnFoo()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectOnFoo(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("cnt", ColumnType.LONG)

@@ -205,7 +196,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -225,8 +216,8 @@ public class MSQSelectTest extends MSQTestBase
.setExpectedCountersForStageWorkerChannel(
CounterSnapshotMatcher
.with()
.rows(isPageSizeLimited() ? new long[]{2, 2, 2} : new long[]{6})
.frames(isPageSizeLimited() ? new long[]{1, 1, 1} : new long[]{1}),
.rows(isPageSizeLimited(contextName) ? new long[]{2, 2, 2} : new long[]{6})
.frames(isPageSizeLimited(contextName) ? new long[]{1, 1, 1} : new long[]{1}),
0, 0, "shuffle"
)
.setExpectedResultRows(ImmutableList.of(

@@ -239,8 +230,9 @@ public class MSQSelectTest extends MSQTestBase
)).verifyResults();
}
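A related knock-on change in MSQSelectTest: helpers such as isDurableStorageDestination and isPageSizeLimited now take the context name (and context) as arguments, because with method-level parameters there are no contextName/context instance fields left to read. The sketch below shows only the shape of that change; the actual predicate logic is not visible in this excerpt and the body here is a placeholder assumption.

// Placeholder predicate: the real check in MSQSelectTest is not shown in this diff excerpt.
private boolean isDurableStorageDestination(String contextName, Map<String, Object> context)
{
  return Boolean.TRUE.equals(context.get("durableShuffleStorage"));
}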
@Test
public void testSelectOnFoo2()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectOnFoo2(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("m1", ColumnType.LONG)

@@ -265,7 +257,7 @@ public class MSQSelectTest extends MSQTestBase
.build())
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -290,15 +282,16 @@ public class MSQSelectTest extends MSQTestBase
.setExpectedCountersForStageWorkerChannel(
CounterSnapshotMatcher
.with()
.rows(isPageSizeLimited() ? new long[]{1L, 2L} : new long[]{3L})
.frames(isPageSizeLimited() ? new long[]{1L, 1L} : new long[]{1L}),
.rows(isPageSizeLimited(contextName) ? new long[]{1L, 2L} : new long[]{3L})
.frames(isPageSizeLimited(contextName) ? new long[]{1L, 1L} : new long[]{1L}),
0, 0, "shuffle"
)
.verifyResults();
}

@Test
public void testSelectOnFooDuplicateColumnNames()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectOnFooDuplicateColumnNames(String contextName, Map<String, Object> context)
{
// Duplicate column names are OK in SELECT statements.

@@ -335,7 +328,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(expectedColumnMappings)
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -354,8 +347,8 @@ public class MSQSelectTest extends MSQTestBase
.setExpectedCountersForStageWorkerChannel(
CounterSnapshotMatcher
.with()
.rows(isPageSizeLimited() ? new long[]{2, 2, 2} : new long[]{6})
.frames(isPageSizeLimited() ? new long[]{1, 1, 1} : new long[]{1}),
.rows(isPageSizeLimited(contextName) ? new long[]{2, 2, 2} : new long[]{6})
.frames(isPageSizeLimited(contextName) ? new long[]{1, 1, 1} : new long[]{1}),
0, 0, "shuffle"
)
.setExpectedResultRows(ImmutableList.of(

@@ -368,8 +361,9 @@ public class MSQSelectTest extends MSQTestBase
)).verifyResults();
}

@Test
public void testSelectOnFooWhereMatchesNoSegments()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectOnFooWhereMatchesNoSegments(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("cnt", ColumnType.LONG)

@@ -398,7 +392,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -409,8 +403,9 @@ public class MSQSelectTest extends MSQTestBase
.verifyResults();
}

@Test
public void testSelectOnFooWhereMatchesNoData()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectOnFooWhereMatchesNoData(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("cnt", ColumnType.LONG)

@@ -432,7 +427,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -443,8 +438,9 @@ public class MSQSelectTest extends MSQTestBase
.verifyResults();
}

@Test
public void testSelectAndOrderByOnFooWhereMatchesNoData()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSelectAndOrderByOnFooWhereMatchesNoData(String contextName, Map<String, Object> context)
{
RowSignature resultSignature = RowSignature.builder()
.add("cnt", ColumnType.LONG)

@@ -467,7 +463,7 @@ public class MSQSelectTest extends MSQTestBase
)
.columnMappings(ColumnMappings.identity(resultSignature))
.tuningConfig(MSQTuningConfig.defaultConfig())
.destination(isDurableStorageDestination()
.destination(isDurableStorageDestination(contextName, context)
? DurableStorageMSQDestination.INSTANCE
: TaskReportMSQDestination.INSTANCE)
.build()

@@ -478,8 +474,9 @@ public class MSQSelectTest extends MSQTestBase
.verifyResults();
}

@Test
public void testGroupByOnFoo()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testGroupByOnFoo(String contextName, Map<String, Object> context)
{
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("cnt", ColumnType.LONG)
|
||||
|
@ -512,7 +509,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -537,8 +534,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByOrderByDimension()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByOrderByDimension(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("m1", ColumnType.FLOAT)
|
||||
|
@ -579,7 +577,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -613,8 +611,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectWithLimit()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectWithLimit(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("cnt", ColumnType.LONG)
|
||||
|
@ -636,7 +635,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
.columnMappings(ColumnMappings.identity(resultSignature))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -668,8 +667,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)).verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectWithGroupByLimit()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectWithGroupByLimit(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("cnt", ColumnType.LONG)
|
||||
|
@ -705,7 +705,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -715,8 +715,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectLookup()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectLookup(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final RowSignature rowSignature = RowSignature.builder().add("EXPR$0", ColumnType.LONG).build();
|
||||
|
||||
|
@ -735,7 +736,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.build())
|
||||
.columnMappings(new ColumnMappings(ImmutableList.of(new ColumnMapping("a0", "EXPR$0"))))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -744,8 +745,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testJoinWithLookup()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testJoinWithLookup(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final RowSignature rowSignature =
|
||||
RowSignature.builder()
|
||||
|
@ -791,7 +793,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -809,8 +811,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubquery()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSubquery(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("cnt", ColumnType.LONG)
|
||||
|
@ -840,7 +843,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.query(query)
|
||||
.columnMappings(new ColumnMappings(ImmutableList.of(new ColumnMapping("a0", "cnt"))))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -866,19 +869,21 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
public void testBroadcastJoin()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testBroadcastJoin(String contextName, Map<String, Object> context)
{
testJoin(JoinAlgorithm.BROADCAST);
testJoin(contextName, context, JoinAlgorithm.BROADCAST);
}

@Test
public void testSortMergeJoin()
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testSortMergeJoin(String contextName, Map<String, Object> context)
{
testJoin(JoinAlgorithm.SORT_MERGE);
testJoin(contextName, context, JoinAlgorithm.SORT_MERGE);
}

private void testJoin(final JoinAlgorithm joinAlgorithm)
private void testJoin(String contextName, Map<String, Object> context, final JoinAlgorithm joinAlgorithm)
{
final Map<String, Object> queryContext =
ImmutableMap.<String, Object>builder()

@ -1011,7 +1016,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1025,8 +1030,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByOrderByAggregation()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByOrderByAggregation(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("m1", ColumnType.FLOAT)
|
||||
|
@ -1069,7 +1075,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1104,8 +1110,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByOrderByAggregationWithLimit()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByOrderByAggregationWithLimit(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("m1", ColumnType.FLOAT)
|
||||
|
@ -1148,7 +1155,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1180,8 +1187,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByOrderByAggregationWithLimitAndOffset()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByOrderByAggregationWithLimitAndOffset(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("m1", ColumnType.FLOAT)
|
||||
|
@ -1225,7 +1233,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1256,10 +1264,11 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
public void testExternGroupBy() throws IOException
@MethodSource("data")
@ParameterizedTest(name = "{index}:with context {0}")
public void testExternGroupBy(String contextName, Map<String, Object> context) throws IOException
{
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

RowSignature rowSignature = RowSignature.builder()

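Part of the temp-dir rework is visible here: the static MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, ...) call, which depended on the JUnit 4 TemporaryFolder rule, becomes a getResourceAsTemporaryFile(...) call inherited from the test base. The helper's implementation is not part of this hunk, so the following is only a sketch of what such a helper typically does: copy a classpath resource into a caller-supplied temp directory.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

class ResourceToTempFileSketch
{
  // Hypothetical stand-in for the base-class helper referenced in the diff.
  static File getResourceAsTemporaryFile(String resource, File tempDir) throws IOException
  {
    File out = new File(tempDir, new File(resource).getName());
    try (InputStream in = ResourceToTempFileSketch.class.getResourceAsStream(resource)) {
      if (in == null) {
        throw new IOException("Resource not found: " + resource);
      }
      // copy the classpath resource into the per-test directory
      Files.copy(in, out.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
    return out;
  }
}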
@ -1325,7 +1334,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1349,13 +1358,14 @@ public class MSQSelectTest extends MSQTestBase
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testExternSelectWithMultipleWorkers() throws IOException
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testExternSelectWithMultipleWorkers(String contextName, Map<String, Object> context) throws IOException
|
||||
{
|
||||
Map<String, Object> multipleWorkerContext = new HashMap<>(context);
|
||||
multipleWorkerContext.put(MultiStageQueryContext.CTX_MAX_NUM_TASKS, 3);
|
||||
|
||||
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
|
||||
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
|
||||
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
|
||||
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
|
@ -1438,7 +1448,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1456,8 +1466,8 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.setExpectedCountersForStageWorkerChannel(
|
||||
CounterSnapshotMatcher
|
||||
.with()
|
||||
.rows(isPageSizeLimited() ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{5L})
|
||||
.frames(isPageSizeLimited() ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{1L}),
|
||||
.rows(isPageSizeLimited(contextName) ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{5L})
|
||||
.frames(isPageSizeLimited(contextName) ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{1L}),
|
||||
0, 0, "shuffle"
|
||||
)
|
||||
.setExpectedCountersForStageWorkerChannel(
|
||||
|
@ -1473,12 +1483,12 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.setExpectedCountersForStageWorkerChannel(
|
||||
CounterSnapshotMatcher
|
||||
.with()
|
||||
.rows(isPageSizeLimited() ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{5L})
|
||||
.frames(isPageSizeLimited() ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{1L}),
|
||||
.rows(isPageSizeLimited(contextName) ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{5L})
|
||||
.frames(isPageSizeLimited(contextName) ? new long[]{1L, 1L, 1L, 1L, 1L} : new long[]{1L}),
|
||||
0, 1, "shuffle"
|
||||
);
|
||||
// adding result stage counter checks
|
||||
if (isPageSizeLimited()) {
|
||||
if (isPageSizeLimited(contextName)) {
|
||||
selectTester.setExpectedCountersForStageWorkerChannel(
|
||||
CounterSnapshotMatcher
|
||||
.with().rows(2, 0, 2, 0, 2),
|
||||
|
@ -1500,8 +1510,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
selectTester.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIncorrectSelectQuery()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testIncorrectSelectQuery(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("select a from ")
|
||||
|
@ -1512,8 +1523,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnInformationSchemaSource()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnInformationSchemaSource(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA")
|
||||
|
@ -1524,8 +1536,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnSysSource()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnSysSource(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("SELECT * FROM sys.segments")
|
||||
|
@ -1536,8 +1549,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnSysSourceWithJoin()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnSysSourceWithJoin(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("select s.segment_id, s.num_rows, f.dim1 from sys.segments as s, foo as f")
|
||||
|
@ -1548,8 +1562,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnSysSourceContainingWith()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnSysSourceContainingWith(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("with segment_source as (SELECT * FROM sys.segments) "
|
||||
|
@ -1561,8 +1576,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectOnUserDefinedSourceContainingWith()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectOnUserDefinedSourceContainingWith(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("m1", ColumnType.LONG)
|
||||
|
@ -1590,7 +1606,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
.columnMappings(ColumnMappings.identity(resultSignature))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -1615,15 +1631,16 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.setExpectedCountersForStageWorkerChannel(
|
||||
CounterSnapshotMatcher
|
||||
.with()
|
||||
.rows(isPageSizeLimited() ? new long[]{1, 2} : new long[]{3})
|
||||
.frames(isPageSizeLimited() ? new long[]{1, 1} : new long[]{1}),
|
||||
.rows(isPageSizeLimited(contextName) ? new long[]{1, 2} : new long[]{3})
|
||||
.frames(isPageSizeLimited(contextName) ? new long[]{1, 1} : new long[]{1}),
|
||||
0, 0, "shuffle"
|
||||
)
|
||||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testScanWithMultiValueSelectQuery()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testScanWithMultiValueSelectQuery(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature expectedScanSignature = RowSignature.builder()
|
||||
.add("dim3", ColumnType.STRING)
|
||||
|
@ -1660,7 +1677,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -1676,8 +1693,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)).verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHavingOnApproximateCountDistinct()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testHavingOnApproximateCountDistinct(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("dim2", ColumnType.STRING)
|
||||
|
@ -1730,7 +1748,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
new ColumnMapping("a0", "col")
|
||||
)))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -1745,8 +1763,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByWithMultiValue()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByWithMultiValue(String contextName, Map<String, Object> context)
|
||||
{
|
||||
Map<String, Object> localContext = enableMultiValueUnnesting(context, true);
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
|
@ -1784,7 +1803,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -1794,8 +1813,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testGroupByWithMultiValueWithoutGroupByEnable()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByWithMultiValueWithoutGroupByEnable(String contextName, Map<String, Object> context)
|
||||
{
|
||||
Map<String, Object> localContext = enableMultiValueUnnesting(context, false);
|
||||
|
||||
|
@ -1811,8 +1831,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyExecutionError();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByWithMultiValueMvToArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByWithMultiValueMvToArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
Map<String, Object> localContext = enableMultiValueUnnesting(context, true);
|
||||
|
||||
|
@ -1857,7 +1878,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -1868,8 +1889,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByArrayWithMultiValueMvToArray()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByArrayWithMultiValueMvToArray(String contextName, Map<String, Object> context)
|
||||
{
|
||||
Map<String, Object> localContext = enableMultiValueUnnesting(context, true);
|
||||
|
||||
|
@ -1927,7 +1949,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -1936,10 +1958,11 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTimeColumnAggregationFromExtern() throws IOException
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testTimeColumnAggregationFromExtern(String contextName, Map<String, Object> context) throws IOException
|
||||
{
|
||||
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
|
||||
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
|
||||
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
|
||||
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
|
@ -1976,8 +1999,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyPlanningErrors();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByWithMultiValueMvToArrayWithoutGroupByEnable()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByWithMultiValueMvToArrayWithoutGroupByEnable(String contextName, Map<String, Object> context)
|
||||
{
|
||||
Map<String, Object> localContext = enableMultiValueUnnesting(context, false);
|
||||
|
||||
|
@ -1994,8 +2018,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyExecutionError();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByWithComplexColumnThrowsUnsupportedException()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByWithComplexColumnThrowsUnsupportedException(String contextName, Map<String, Object> context)
|
||||
{
|
||||
testSelectQuery()
|
||||
.setSql("select unique_dim1 from foo2 group by unique_dim1")
|
||||
|
@ -2008,8 +2033,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyExecutionError();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByMultiValueMeasureQuery()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByMultiValueMeasureQuery(String contextName, Map<String, Object> context)
|
||||
{
|
||||
final RowSignature rowSignature = RowSignature.builder()
|
||||
.add("__time", ColumnType.LONG)
|
||||
|
@ -2046,7 +2072,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -2064,8 +2090,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroupByOnFooWithDurableStoragePathAssertions() throws IOException
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testGroupByOnFooWithDurableStoragePathAssertions(String contextName, Map<String, Object> context) throws IOException
|
||||
{
|
||||
RowSignature rowSignature = RowSignature.builder()
|
||||
.add("cnt", ColumnType.LONG)
|
||||
|
@ -2100,7 +2127,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
))
|
||||
)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -2118,14 +2145,15 @@ public class MSQSelectTest extends MSQTestBase
|
|||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectRowsGetUntruncatedByDefault() throws IOException
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectRowsGetUntruncatedByDefault(String contextName, Map<String, Object> context) throws IOException
|
||||
{
|
||||
RowSignature dummyRowSignature = RowSignature.builder().add("timestamp", ColumnType.LONG).build();
|
||||
|
||||
final int numFiles = 200;
|
||||
|
||||
final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
|
||||
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
|
||||
final String toReadFileNameAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
|
||||
|
||||
String externalFiles = String.join(", ", Collections.nCopies(numFiles, toReadFileNameAsJson));
|
||||
|
@ -2179,7 +2207,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
)
|
||||
))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build())
|
||||
|
@ -2188,10 +2216,13 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testJoinUsesDifferentAlgorithm()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testJoinUsesDifferentAlgorithm(String contextName, Map<String, Object> context)
|
||||
{
|
||||
|
||||
|
||||
|
||||
// This test asserts that the join algorithm used is a different one from that supplied. In sqlCompatible() mode
|
||||
// the query gets planned differently, therefore we do use the sortMerge processor. Instead of having separate
|
||||
// handling, a similar test has been described in CalciteJoinQueryMSQTest, therefore we don't want to repeat that
|
||||
|
@ -2274,7 +2305,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
new ColumnMapping("a0", "cnt")
|
||||
)
|
||||
))
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
|
@ -2293,8 +2324,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectUnnestOnInlineFoo()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectUnnestOnInlineFoo(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("EXPR$0", ColumnType.LONG)
|
||||
|
@ -2333,7 +2365,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.build())
|
||||
.columnMappings(expectedColumnMappings)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -2349,8 +2381,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSelectUnnestOnFoo()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectUnnestOnFoo(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("j0.unnest", ColumnType.STRING)
|
||||
|
@ -2387,7 +2420,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.build())
|
||||
.columnMappings(expectedColumnMappings)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -2417,8 +2450,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSelectUnnestOnQueryFoo()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testSelectUnnestOnQueryFoo(String contextName, Map<String, Object> context)
|
||||
{
|
||||
RowSignature resultSignature = RowSignature.builder()
|
||||
.add("j0.unnest", ColumnType.STRING)
|
||||
|
@ -2475,7 +2509,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.build())
|
||||
.columnMappings(expectedColumnMappings)
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@ -2494,8 +2528,9 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.verifyResults();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUnionAllUsingUnionDataSource()
|
||||
@MethodSource("data")
|
||||
@ParameterizedTest(name = "{index}:with context {0}")
|
||||
public void testUnionAllUsingUnionDataSource(String contextName, Map<String, Object> context)
|
||||
{
|
||||
|
||||
final RowSignature rowSignature = RowSignature.builder()
|
||||
|
@ -2542,7 +2577,7 @@ public class MSQSelectTest extends MSQTestBase
|
|||
.build())
|
||||
.columnMappings(ColumnMappings.identity(rowSignature))
|
||||
.tuningConfig(MSQTuningConfig.defaultConfig())
|
||||
.destination(isDurableStorageDestination()
|
||||
.destination(isDurableStorageDestination(contextName, context)
|
||||
? DurableStorageMSQDestination.INSTANCE
|
||||
: TaskReportMSQDestination.INSTANCE)
|
||||
.build()
|
||||
|
@@ -2595,12 +2630,12 @@ public class MSQSelectTest extends MSQTestBase
return localContext;
}

public boolean isDurableStorageDestination()
private boolean isDurableStorageDestination(String contextName, Map<String, Object> context)
{
return QUERY_RESULTS_WITH_DURABLE_STORAGE.equals(contextName) || QUERY_RESULTS_WITH_DEFAULT_CONTEXT.equals(context);
}

public boolean isPageSizeLimited()
public boolean isPageSizeLimited(String contextName)
{
return QUERY_RESULTS_WITH_DURABLE_STORAGE.equals(contextName);
}

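With the move to method-level parameters, these predicates can no longer read runner-injected fields; they take the context name (and map) as arguments instead. A minimal sketch of the resulting shape, with the constant value assumed for illustration and the body simplified to the name check:

import java.util.Map;

class DestinationPredicateSketch
{
  // assumed value; the real constant is defined elsewhere in the test framework
  static final String QUERY_RESULTS_WITH_DURABLE_STORAGE = "query_results_with_durable_storage";

  // Before (sketch): an instance method reading a field the parameterized runner had set.
  // After: the same decision is a pure function of the per-invocation arguments.
  static boolean isDurableStorageDestination(String contextName, Map<String, Object> context)
  {
    // simplified: the real method in the diff also special-cases the default-context map
    return QUERY_RESULTS_WITH_DURABLE_STORAGE.equals(contextName);
  }
}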
@@ -24,7 +24,7 @@ import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestTaskActionClient;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.junit.Test;
import org.junit.jupiter.api.Test;

public class InsertLockPreemptedFaultTest extends MSQTestBase
{

@@ -28,7 +28,6 @@ import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.msq.indexing.MSQSpec;
import org.apache.druid.msq.indexing.MSQTuningConfig;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.msq.util.MultiStageQueryContext;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;

@@ -45,8 +44,8 @@ import org.apache.druid.sql.calcite.planner.ColumnMapping;
import org.apache.druid.sql.calcite.planner.ColumnMappings;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;

@@ -64,10 +63,10 @@ public class MSQWarningsTest extends MSQTestBase
private Query<?> defaultQuery;
private ColumnMappings defaultColumnMappings;

@Before
@BeforeEach
public void setUp3() throws IOException
{
File tempFile = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/unparseable.gz");
File tempFile = getResourceAsTemporaryFile("/unparseable.gz");

// Rename the file and the file's extension from .tmp to .gz to prevent issues with 'parsing' the file
toRead = new File(tempFile.getParentFile(), "unparseable.gz");

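The same mechanical import swap appears in each migrated class: JUnit 4 lifecycle annotations give way to their JUnit 5 (Jupiter) counterparts. A small self-contained sketch of the mapping, with the JUnit 4 equivalents listed in comments:

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class LifecycleMigrationSketch
{
  // org.junit.Before -> org.junit.jupiter.api.BeforeEach
  // org.junit.After  -> org.junit.jupiter.api.AfterEach
  // org.junit.Ignore -> org.junit.jupiter.api.Disabled
  // org.junit.Test   -> org.junit.jupiter.api.Test

  @BeforeEach
  void setUp()
  {
    // same per-test semantics as JUnit 4's @Before
  }

  @Test
  void example()
  {
  }

  @AfterEach
  void tearDown()
  {
  }
}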
@@ -39,7 +39,6 @@ import org.apache.druid.msq.sql.entity.PageInformation;
import org.apache.druid.msq.sql.entity.ResultSetInformation;
import org.apache.druid.msq.sql.entity.SqlStatementResult;
import org.apache.druid.msq.test.MSQTestBase;
import org.apache.druid.msq.test.MSQTestFileUtils;
import org.apache.druid.msq.test.MSQTestOverlordServiceClient;
import org.apache.druid.msq.util.MultiStageQueryContext;
import org.apache.druid.query.ExecutionMode;

@@ -50,11 +49,12 @@ import org.apache.druid.sql.http.ResultFormat;
import org.apache.druid.sql.http.SqlQuery;
import org.apache.druid.storage.NilStorageConnector;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;

@@ -69,7 +69,7 @@ public class SqlMSQStatementResourcePostTest extends MSQTestBase
private SqlStatementResource resource;


@Before
@BeforeEach
public void init()
{
resource = new SqlStatementResource(

@@ -445,7 +445,7 @@ public class SqlMSQStatementResourcePostTest extends MSQTestBase
context.put(MultiStageQueryContext.CTX_ROWS_PER_PAGE, 2);
context.put(MultiStageQueryContext.CTX_MAX_NUM_TASKS, 3);

final File toRead = MSQTestFileUtils.getResourceAsTemporaryFile(temporaryFolder, this, "/wikipedia-sampled.json");
final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
final String toReadAsJson = queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());

@@ -92,8 +92,8 @@ import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.Mockito;

@@ -699,7 +699,7 @@ public class SqlStatementResourceTest extends MSQTestBase
);
}

@Before
@BeforeEach
public void init() throws Exception
{
overlordClient = Mockito.mock(OverlordClient.class);

@@ -708,7 +708,7 @@ public class SqlStatementResourceTest extends MSQTestBase
sqlStatementFactory,
objectMapper,
overlordClient,
new LocalFileStorageConnector(tmpFolder.newFolder("local")),
new LocalFileStorageConnector(newTempFolder("local")),
authorizerMapper
);
}

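Here the JUnit 4 rule call tmpFolder.newFolder("local") becomes newTempFolder("local"), a helper presumably provided by the test base class. Its implementation is not shown in this diff; one plausible JUnit 5 shape, using @TempDir, is sketched below as an assumption:

import java.io.File;
import org.junit.jupiter.api.io.TempDir;

class TempFolderHelperSketch
{
  @TempDir
  File baseTempDir; // JUnit 5 injects and cleans up a per-test directory

  // Hypothetical equivalent of the newTempFolder(name) helper referenced in the diff.
  File newTempFolder(String name)
  {
    File dir = new File(baseTempDir, name);
    if (!dir.mkdirs() && !dir.isDirectory()) {
      throw new IllegalStateException("Unable to create temp folder " + dir);
    }
    return dir;
  }
}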
@@ -42,7 +42,7 @@ public class CalciteArraysQueryMSQTest extends CalciteArraysQueryTest
{
super.configureGuice(builder);
builder.addModules(
CalciteMSQTestsHelper.fetchModules(temporaryFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
);
}

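fetchModules no longer receives a TemporaryFolder rule; callers hand it a Function<String, File> by passing the base class's newTempFolder as a method reference. A sketch of the call shape (the body of newTempFolder below is an assumption, not the project's implementation):

import java.io.File;
import java.util.function.Function;

class FolderProducerCallerSketch
{
  File newTempFolder(String name)
  {
    // assumed stand-in for the framework helper: create a named directory under the JVM temp dir
    File dir = new File(System.getProperty("java.io.tmpdir"), name);
    if (!dir.mkdirs() && !dir.isDirectory()) {
      throw new IllegalStateException("Unable to create temp folder " + dir);
    }
    return dir;
  }

  void configure()
  {
    // the method reference satisfies Function<String, File>, so the helper
    // no longer needs to know anything about JUnit rules
    Function<String, File> tempFolderProducer = this::newTempFolder;
    File cacheDir = tempFolderProducer.apply("test");
    System.out.println(cacheDir);
  }
}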
@@ -82,13 +82,14 @@ import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.easymock.EasyMock;
import org.joda.time.Interval;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;

import javax.annotation.Nullable;
import java.io.IOException;

import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;

import static org.apache.druid.sql.calcite.util.CalciteTests.ARRAYS_DATASOURCE;

@@ -113,10 +114,12 @@ import static org.mockito.Mockito.doThrow;
public class CalciteMSQTestsHelper
{
public static List<Module> fetchModules(
TemporaryFolder temporaryFolder,
Function<String, File> tempFolderProducer,
TestGroupByBuffers groupByBuffers
)
{
File cacheManagerDir = tempFolderProducer.apply("test");
File storageDir = tempFolderProducer.apply("localsegments");

Module customBindings =
binder -> {

@@ -152,29 +155,18 @@ public class CalciteMSQTestsHelper
);
ObjectMapper testMapper = MSQTestBase.setupObjectMapper(dummyInjector);
IndexIO indexIO = new IndexIO(testMapper, ColumnConfig.DEFAULT);
SegmentCacheManager segmentCacheManager = null;
try {
segmentCacheManager = new SegmentCacheManagerFactory(testMapper).manufacturate(temporaryFolder.newFolder(
"test"));
}
catch (IOException e) {
e.printStackTrace();
}
SegmentCacheManager segmentCacheManager = new SegmentCacheManagerFactory(testMapper)
.manufacturate(cacheManagerDir);
LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
MSQTestSegmentManager segmentManager = new MSQTestSegmentManager(segmentCacheManager, indexIO);
try {
config.storageDirectory = temporaryFolder.newFolder("localsegments");
}
catch (IOException e) {
throw new ISE(e, "Unable to create folder");
}
config.storageDirectory = storageDir;
binder.bind(DataSegmentPusher.class).toProvider(() -> new MSQTestDelegateDataSegmentPusher(
new LocalDataSegmentPusher(config),
segmentManager
));
binder.bind(DataSegmentAnnouncer.class).toInstance(new NoopDataSegmentAnnouncer());
binder.bind(DataSegmentProvider.class)
.toInstance((segmentId, channelCounters, isReindex) -> getSupplierForSegment(segmentId));
.toInstance((segmentId, channelCounters, isReindex) -> getSupplierForSegment(tempFolderProducer, segmentId));
binder.bind(DataServerQueryHandlerFactory.class).toInstance(getTestDataServerQueryHandlerFactory());

GroupByQueryConfig groupByQueryConfig = new GroupByQueryConfig();

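Because the folder producer is just a Function<String, File>, the try/catch IOException blocks removed above are no longer needed inside the helper. Purely as an illustration (not part of the commit), a standalone producer could wrap the checked exception like this:

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.function.Function;

class StandaloneTempFolderProducer
{
  // Wraps the checked IOException so callers no longer need the try/catch blocks
  // that were removed from the helper above.
  static Function<String, File> create()
  {
    return name -> {
      try {
        return Files.createTempDirectory(name).toFile();
      }
      catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    };
  }
}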
@ -206,116 +198,104 @@ public class CalciteMSQTestsHelper
|
|||
return mockFactory;
|
||||
}
|
||||
|
||||
private static Supplier<ResourceHolder<Segment>> getSupplierForSegment(SegmentId segmentId)
|
||||
private static Supplier<ResourceHolder<Segment>> getSupplierForSegment(Function<String, File> tempFolderProducer, SegmentId segmentId)
|
||||
{
|
||||
final TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
try {
|
||||
temporaryFolder.create();
|
||||
}
|
||||
catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
final QueryableIndex index;
|
||||
try {
|
||||
switch (segmentId.getDataSource()) {
|
||||
case DATASOURCE1:
|
||||
IncrementalIndexSchema foo1Schema = new IncrementalIndexSchema.Builder()
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new FloatSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(foo1Schema)
|
||||
.rows(ROWS1)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE2:
|
||||
final IncrementalIndexSchema indexSchemaDifferentDim3M1Types = new IncrementalIndexSchema.Builder()
|
||||
.withDimensionsSpec(
|
||||
new DimensionsSpec(
|
||||
ImmutableList.of(
|
||||
new StringDimensionSchema("dim1"),
|
||||
new StringDimensionSchema("dim2"),
|
||||
new LongDimensionSchema("dim3")
|
||||
)
|
||||
)
|
||||
)
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new LongSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(indexSchemaDifferentDim3M1Types)
|
||||
.rows(ROWS2)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE3:
|
||||
case CalciteTests.BROADCAST_DATASOURCE:
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(INDEX_SCHEMA_NUMERIC_DIMS)
|
||||
.rows(ROWS1_WITH_NUMERIC_DIMS)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE5:
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(INDEX_SCHEMA_LOTS_O_COLUMNS)
|
||||
.rows(ROWS_LOTS_OF_COLUMNS)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case ARRAYS_DATASOURCE:
|
||||
index = IndexBuilder.create()
|
||||
.tmpDir(temporaryFolder.newFolder())
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
.withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
|
||||
.withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build()
|
||||
)
|
||||
.inputSource(
|
||||
ResourceInputSource.of(
|
||||
NestedDataTestUtils.class.getClassLoader(),
|
||||
NestedDataTestUtils.ARRAY_TYPES_DATA_FILE
|
||||
)
|
||||
)
|
||||
.inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
|
||||
.inputTmpDir(temporaryFolder.newFolder())
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case CalciteTests.WIKIPEDIA_FIRST_LAST:
|
||||
index = TestDataBuilder.makeWikipediaIndexWithAggregation(temporaryFolder.newFolder());
|
||||
break;
|
||||
default:
|
||||
throw new ISE("Cannot query segment %s in test runner", segmentId);
|
||||
switch (segmentId.getDataSource()) {
|
||||
case DATASOURCE1:
|
||||
IncrementalIndexSchema foo1Schema = new IncrementalIndexSchema.Builder()
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new FloatSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(foo1Schema)
|
||||
.rows(ROWS1)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE2:
|
||||
final IncrementalIndexSchema indexSchemaDifferentDim3M1Types = new IncrementalIndexSchema.Builder()
|
||||
.withDimensionsSpec(
|
||||
new DimensionsSpec(
|
||||
ImmutableList.of(
|
||||
new StringDimensionSchema("dim1"),
|
||||
new StringDimensionSchema("dim2"),
|
||||
new LongDimensionSchema("dim3")
|
||||
)
|
||||
)
|
||||
)
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new LongSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(indexSchemaDifferentDim3M1Types)
|
||||
.rows(ROWS2)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE3:
|
||||
case CalciteTests.BROADCAST_DATASOURCE:
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(INDEX_SCHEMA_NUMERIC_DIMS)
|
||||
.rows(ROWS1_WITH_NUMERIC_DIMS)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE5:
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(INDEX_SCHEMA_LOTS_O_COLUMNS)
|
||||
.rows(ROWS_LOTS_OF_COLUMNS)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case ARRAYS_DATASOURCE:
|
||||
index = IndexBuilder.create()
|
||||
.tmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(
|
||||
new IncrementalIndexSchema.Builder()
|
||||
.withTimestampSpec(NestedDataTestUtils.AUTO_SCHEMA.getTimestampSpec())
|
||||
.withDimensionsSpec(NestedDataTestUtils.AUTO_SCHEMA.getDimensionsSpec())
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build()
|
||||
)
|
||||
.inputSource(
|
||||
ResourceInputSource.of(
|
||||
NestedDataTestUtils.class.getClassLoader(),
|
||||
NestedDataTestUtils.ARRAY_TYPES_DATA_FILE
|
||||
)
|
||||
)
|
||||
.inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
|
||||
.inputTmpDir(tempFolderProducer.apply("tmpDir"))
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case CalciteTests.WIKIPEDIA_FIRST_LAST:
|
||||
index = TestDataBuilder.makeWikipediaIndexWithAggregation(tempFolderProducer.apply("tmpDir"));
|
||||
break;
|
||||
default:
|
||||
throw new ISE("Cannot query segment %s in test runner", segmentId);
|
||||
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new ISE(e, "Unable to load index for segment %s", segmentId);
|
||||
}
|
||||
Segment segment = new Segment()
|
||||
{
|
||||
|
|
|
@@ -36,14 +36,11 @@ import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.run.EngineFeature;
import org.apache.druid.sql.calcite.run.QueryMaker;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;

/**
* Runs {@link CalciteJoinQueryTest} but with MSQ engine.
*/
@RunWith(Enclosed.class)
public abstract class CalciteSelectJoinQueryMSQTest
public class CalciteSelectJoinQueryMSQTest
{
/**
* Run all tests with {@link JoinAlgorithm#BROADCAST}.

@@ -87,19 +84,23 @@ public abstract class CalciteSelectJoinQueryMSQTest
{
private final JoinAlgorithm joinAlgorithm;


protected Base(final JoinAlgorithm joinAlgorithm)
{
super(joinAlgorithm == JoinAlgorithm.SORT_MERGE);
this.joinAlgorithm = joinAlgorithm;
}

@Override
public boolean isSortBasedJoin()
{
return joinAlgorithm == JoinAlgorithm.SORT_MERGE;
}

@Override
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.addModules(
CalciteMSQTestsHelper.fetchModules(temporaryFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
);
}

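The class drops @RunWith(Enclosed.class) and its abstract modifier; per the commit message, enclosed test groups are expressed with JUnit 5 @Nested classes instead. A minimal sketch of that pattern (the nested class names below are illustrative, not the ones used in this file):

import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

class EnclosedToNestedSketch
{
  @Nested
  class BroadcastVariant
  {
    @Test
    void runsWithBroadcastJoin()
    {
      // tests in a @Nested class are discovered without any special runner
    }
  }

  @Nested
  class SortMergeVariant
  {
    @Test
    void runsWithSortMergeJoin()
    {
    }
  }
}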
@@ -35,8 +35,11 @@ import org.apache.druid.sql.calcite.CalciteQueryTest;
import org.apache.druid.sql.calcite.QueryTestBuilder;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

import java.util.concurrent.TimeUnit;

/**
* Runs {@link CalciteQueryTest} but with MSQ engine

@@ -47,7 +50,7 @@ public class CalciteSelectQueryMSQTest extends CalciteQueryTest
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.addModules(CalciteMSQTestsHelper.fetchModules(temporaryFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0]));
builder.addModules(CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0]));
}


@@ -86,77 +89,88 @@ public class CalciteSelectQueryMSQTest extends CalciteQueryTest
.verifyNativeQueries(new VerifyMSQSupportedNativeQueriesPredicate());
}

@Ignore
@Disabled
@Override
@Test
public void testCannotInsertWithNativeEngine()
{

}

@Ignore
@Disabled
@Override
@Test
public void testCannotReplaceWithNativeEngine()
{

}

@Ignore
@Disabled
@Override
@Test
public void testRequireTimeConditionSimpleQueryNegative()
{

}

@Ignore
@Disabled
@Override
@Test
public void testRequireTimeConditionSubQueryNegative()
{

}

@Ignore
@Disabled
@Override
@Test
public void testRequireTimeConditionSemiJoinNegative()
{

}

@Ignore
@Disabled
@Override
@Test
public void testExactCountDistinctWithFilter()
{

}

@Ignore
@Disabled
@Override
@Test
public void testUnplannableScanOrderByNonTime()
{

}

@Ignore
@Disabled
@Override
@Test
public void testUnplannableJoinQueriesInNonSQLCompatibleMode()
{

}

@Ignore
@Disabled
@Override
@Test
public void testQueryWithMoreThanMaxNumericInFilter()
{

}

@Ignore
@Disabled
@Override
@Test
public void testUnSupportedNullsFirst()
{
}

@Ignore
@Disabled
@Override
@Test
public void testUnSupportedNullsLast()
{
}

@ -180,7 +194,8 @@ public class CalciteSelectQueryMSQTest extends CalciteQueryTest
    }
  }

  @Test(timeout = 40000)
  @Test
  @Timeout(value = 40000, unit = TimeUnit.MILLISECONDS)
  public void testJoinMultipleTablesWithWhereCondition()
  {
    testBuilder()

@ -217,6 +232,7 @@ public class CalciteSelectQueryMSQTest extends CalciteQueryTest
  }

  @Override
  @Test
  public void testFilterParseLongNullable()
  {
    // this isn't really correct in default value mode, the result should be ImmutableList.of(new Object[]{0L})
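JUnit 4's @Test(timeout = ...) attribute does not exist on Jupiter's @Test, so the hunk above splits it into @Test plus a separate @Timeout annotation. A self-contained sketch of the same pattern; the test name and body are illustrative:

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

import java.util.concurrent.TimeUnit;

class TimeoutMigrationExampleTest
{
  @Test
  @Timeout(value = 40_000, unit = TimeUnit.MILLISECONDS) // was @Test(timeout = 40000)
  void finishesWithinFortySeconds() throws InterruptedException
  {
    Thread.sleep(10); // placeholder workload; the real test issues a join query
  }
}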
@ -43,8 +43,8 @@ import org.apache.druid.sql.calcite.QueryTestBuilder;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

/**
 * Runs {@link CalciteUnionQueryTest} but with MSQ engine

@ -56,7 +56,7 @@ public class CalciteUnionQueryMSQTest extends CalciteUnionQueryTest
  {
    super.configureGuice(builder);
    builder.addModules(
        CalciteMSQTestsHelper.fetchModules(temporaryFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
        CalciteMSQTestsHelper.fetchModules(this::newTempFolder, TestGroupByBuffers.createDefault()).toArray(new Module[0])
    );
  }

@ -113,7 +113,7 @@ public class CalciteUnionQueryMSQTest extends CalciteUnionQueryTest
  }

  @Ignore("Ignored till MSQ can plan UNION ALL with any operand")
  @Disabled("Ignored till MSQ can plan UNION ALL with any operand")
  @Test
  public void testUnionOnSubqueries()
  {
@ -192,13 +192,10 @@ import org.apache.druid.timeline.partition.ShardSpec;
import org.apache.druid.timeline.partition.TombstoneShardSpec;
import org.easymock.EasyMock;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.joda.time.Interval;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.mockito.Mockito;

import javax.annotation.Nonnull;

@ -218,6 +215,7 @@ import java.util.Optional;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;

@ -226,6 +224,7 @@ import static org.apache.druid.sql.calcite.util.CalciteTests.DATASOURCE1;
import static org.apache.druid.sql.calcite.util.CalciteTests.DATASOURCE2;
import static org.apache.druid.sql.calcite.util.TestDataBuilder.ROWS1;
import static org.apache.druid.sql.calcite.util.TestDataBuilder.ROWS2;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
@ -321,8 +320,6 @@ public class MSQTestBase extends BaseCalciteQueryTest

  private MSQTestSegmentManager segmentManager;
  private SegmentCacheManager segmentCacheManager;
  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();

  private TestGroupByBuffers groupByBuffers;
  protected final WorkerMemoryParameters workerMemoryParameters = Mockito.spy(
@ -368,7 +365,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
    });
  }

  @After
  @AfterEach
  public void tearDown2()
  {
    groupByBuffers.close();

@ -391,7 +388,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
  // is created in the main injector, but it depends on the SegmentCacheManagerFactory
  // which depends on the object mapper that the injector will provide, once it
  // is built, but has not yet been built while we build the SQL engine.
  @Before
  @BeforeEach
  public void setUp2() throws Exception
  {
    groupByBuffers = TestGroupByBuffers.createDefault();
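The hunks above are the mechanical lifecycle rename: @Before becomes @BeforeEach and @After becomes @AfterEach, with unchanged semantics (run around every test method). A minimal illustration, not taken from MSQTestBase:

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

class LifecycleMigrationExampleTest
{
  private StringBuilder scratch;

  @BeforeEach // JUnit 4 equivalent: @Before
  void setUp()
  {
    scratch = new StringBuilder();
  }

  @AfterEach // JUnit 4 equivalent: @After
  void tearDown()
  {
    scratch = null; // release per-test resources here
  }

  @Test
  void fixtureIsFreshForEachTest()
  {
    Assertions.assertEquals(0, scratch.length());
  }
}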
@ -408,12 +405,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
    ObjectMapper secondMapper = setupObjectMapper(secondInjector);
    indexIO = new IndexIO(secondMapper, ColumnConfig.DEFAULT);

    try {
      segmentCacheManager = new SegmentCacheManagerFactory(secondMapper).manufacturate(tmpFolder.newFolder("test"));
    }
    catch (IOException exception) {
      throw new ISE(exception, "Unable to create segmentCacheManager");
    }
    segmentCacheManager = new SegmentCacheManagerFactory(secondMapper).manufacturate(newTempFolder("cacheManager"));

    MSQSqlModule sqlModule = new MSQSqlModule();
@ -445,18 +437,13 @@ public class MSQTestBase extends BaseCalciteQueryTest
        binder.bind(QueryProcessingPool.class)
              .toInstance(new ForwardingQueryProcessingPool(Execs.singleThreaded("Test-runner-processing-pool")));
        binder.bind(DataSegmentProvider.class)
              .toInstance((segmentId, channelCounters, isReindex) -> getSupplierForSegment(segmentId));
              .toInstance((segmentId, channelCounters, isReindex) -> getSupplierForSegment(this::newTempFolder, segmentId));
        binder.bind(DataServerQueryHandlerFactory.class).toInstance(getTestDataServerQueryHandlerFactory());
        binder.bind(IndexIO.class).toInstance(indexIO);
        binder.bind(SpecificSegmentsQuerySegmentWalker.class).toInstance(qf.walker());

        LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
        try {
          config.storageDirectory = tmpFolder.newFolder("localsegments");
        }
        catch (IOException e) {
          throw new ISE(e, "Unable to create folder");
        }
        config.storageDirectory = newTempFolder("storageDir");
        binder.bind(DataSegmentPusher.class).toInstance(new MSQTestDelegateDataSegmentPusher(
            new LocalDataSegmentPusher(config),
            segmentManager

@ -474,7 +461,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
        StorageConnectorProvider.class,
        MultiStageQuery.class
    );
    localFileStorageDir = tmpFolder.newFolder("fault");
    localFileStorageDir = newTempFolder("faultStorageDir");
    localFileStorageConnector = Mockito.spy(
        new LocalFileStorageConnector(localFileStorageDir)
    );
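Dropping the TemporaryFolder rule also removes the checked IOException plumbing around newFolder(...). A short sketch of the Jupiter-style equivalent using @TempDir injection; the names are illustrative:

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.io.File;

class TempDirMigrationExampleTest
{
  @TempDir
  File tempDir; // injected per test and cleaned up automatically, no IOException to handle

  @Test
  void createsNamedSubfolder()
  {
    // roughly what tmpFolder.newFolder("storageDir") used to do, minus the try/catch
    File storageDir = new File(tempDir, "storageDir");
    Assertions.assertTrue(storageDir.mkdirs());
  }
}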
@ -620,71 +607,59 @@ public class MSQTestBase extends BaseCalciteQueryTest
|
|||
}
|
||||
|
||||
@Nonnull
|
||||
private Supplier<ResourceHolder<Segment>> getSupplierForSegment(SegmentId segmentId)
|
||||
private Supplier<ResourceHolder<Segment>> getSupplierForSegment(Function<String, File> tempFolderProducer, SegmentId segmentId)
|
||||
{
|
||||
if (segmentManager.getSegment(segmentId) == null) {
|
||||
final QueryableIndex index;
|
||||
TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
try {
|
||||
temporaryFolder.create();
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new ISE(e, "Unable to create temporary folder for tests");
|
||||
}
|
||||
try {
|
||||
switch (segmentId.getDataSource()) {
|
||||
case DATASOURCE1:
|
||||
IncrementalIndexSchema foo1Schema = new IncrementalIndexSchema.Builder()
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new FloatSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(new File(temporaryFolder.newFolder(), "1"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(foo1Schema)
|
||||
.rows(ROWS1)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE2:
|
||||
final IncrementalIndexSchema indexSchemaDifferentDim3M1Types = new IncrementalIndexSchema.Builder()
|
||||
.withDimensionsSpec(
|
||||
new DimensionsSpec(
|
||||
ImmutableList.of(
|
||||
new StringDimensionSchema("dim1"),
|
||||
new StringDimensionSchema("dim2"),
|
||||
new LongDimensionSchema("dim3")
|
||||
)
|
||||
)
|
||||
)
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new LongSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(new File(temporaryFolder.newFolder(), "1"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(indexSchemaDifferentDim3M1Types)
|
||||
.rows(ROWS2)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
default:
|
||||
throw new ISE("Cannot query segment %s in test runner", segmentId);
|
||||
switch (segmentId.getDataSource()) {
|
||||
case DATASOURCE1:
|
||||
IncrementalIndexSchema foo1Schema = new IncrementalIndexSchema.Builder()
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new FloatSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(new File(tempFolderProducer.apply("tmpDir"), "1"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(foo1Schema)
|
||||
.rows(ROWS1)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
case DATASOURCE2:
|
||||
final IncrementalIndexSchema indexSchemaDifferentDim3M1Types = new IncrementalIndexSchema.Builder()
|
||||
.withDimensionsSpec(
|
||||
new DimensionsSpec(
|
||||
ImmutableList.of(
|
||||
new StringDimensionSchema("dim1"),
|
||||
new StringDimensionSchema("dim2"),
|
||||
new LongDimensionSchema("dim3")
|
||||
)
|
||||
)
|
||||
)
|
||||
.withMetrics(
|
||||
new CountAggregatorFactory("cnt"),
|
||||
new LongSumAggregatorFactory("m1", "m1"),
|
||||
new DoubleSumAggregatorFactory("m2", "m2"),
|
||||
new HyperUniquesAggregatorFactory("unique_dim1", "dim1")
|
||||
)
|
||||
.withRollup(false)
|
||||
.build();
|
||||
index = IndexBuilder
|
||||
.create()
|
||||
.tmpDir(new File(tempFolderProducer.apply("tmpDir"), "1"))
|
||||
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
|
||||
.schema(indexSchemaDifferentDim3M1Types)
|
||||
.rows(ROWS2)
|
||||
.buildMMappedIndex();
|
||||
break;
|
||||
default:
|
||||
throw new ISE("Cannot query segment %s in test runner", segmentId);
|
||||
|
||||
}
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new ISE(e, "Unable to load index for segment %s", segmentId);
|
||||
}
|
||||
Segment segment = new Segment()
|
||||
{
|
||||
|
@ -1015,7 +990,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
        () -> runMultiStageQuery(sql, queryContext)
    );

    MatcherAssert.assertThat(e, expectedValidationErrorMatcher);
    assertThat(e, expectedValidationErrorMatcher);
  }

  protected void verifyWorkerCount(CounterSnapshotsTree counterSnapshotsTree)

@ -1339,7 +1314,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
      Assert.fail(StringUtils.format("Query did not throw an exception (sql = [%s])", sql));
    }
    catch (Exception e) {
      MatcherAssert.assertThat(
      assertThat(
          StringUtils.format("Query error did not match expectations (sql = [%s])", sql),
          e,
          expectedExecutionErrorMatcher

@ -1457,7 +1432,7 @@ public class MSQTestBase extends BaseCalciteQueryTest
      if (expectedExecutionErrorMatcher == null) {
        throw new ISE(e, "Query %s failed", sql);
      }
      MatcherAssert.assertThat(e, expectedExecutionErrorMatcher);
      assertThat(e, expectedExecutionErrorMatcher);
      return null;
    }
  }
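The remaining MSQTestBase hunks shorten MatcherAssert.assertThat to a statically imported assertThat; the Hamcrest call itself is unchanged. A small self-contained example of the same import style (a hypothetical test, not from MSQTestBase):

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

import org.junit.jupiter.api.Test;

class HamcrestStaticImportExampleTest
{
  @Test
  void errorMessageMatches()
  {
    Exception e = new IllegalStateException("query failed: boom");
    // same call as MatcherAssert.assertThat(...), just statically imported
    assertThat(e.getMessage(), containsString("failed"));
  }
}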
@ -1,74 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.apache.druid.msq.test;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.io.ByteStreams;
|
||||
import org.apache.druid.java.util.common.IOE;
|
||||
import org.apache.druid.java.util.common.StringUtils;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
|
||||
public class MSQTestFileUtils
|
||||
{
|
||||
|
||||
/**
|
||||
* Helper method that copies a resource to a temporary file, then returns it.
|
||||
*/
|
||||
public static File getResourceAsTemporaryFile(TemporaryFolder temporaryFolder, Object object, final String resource) throws IOException
|
||||
{
|
||||
final File file = temporaryFolder.newFile();
|
||||
final InputStream stream = object.getClass().getResourceAsStream(resource);
|
||||
|
||||
if (stream == null) {
|
||||
throw new IOE("No such resource [%s]", resource);
|
||||
}
|
||||
|
||||
ByteStreams.copy(stream, Files.newOutputStream(file.toPath()));
|
||||
return file;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method that populates a temporary file with {@code numRows} rows and {@code numColumns} columns where the
|
||||
* first column is a string 'timestamp' while the rest are string columns with junk value
|
||||
*/
|
||||
public static File generateTemporaryNdJsonFile(TemporaryFolder temporaryFolder, final int numRows, final int numColumns) throws IOException
|
||||
{
|
||||
final File file = temporaryFolder.newFile();
|
||||
for (int currentRow = 0; currentRow < numRows; ++currentRow) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("{");
|
||||
sb.append("\"timestamp\":\"2016-06-27T00:00:11.080Z\"");
|
||||
for (int currentColumn = 1; currentColumn < numColumns; ++currentColumn) {
|
||||
sb.append(StringUtils.format(",\"column%s\":\"val%s\"", currentColumn, currentRow));
|
||||
}
|
||||
sb.append("}");
|
||||
Files.write(file.toPath(), ImmutableList.of(sb.toString()), StandardCharsets.UTF_8, StandardOpenOption.APPEND);
|
||||
}
|
||||
file.deleteOnExit();
|
||||
return file;
|
||||
}
|
||||
}
|
|
@ -93,6 +93,37 @@
|
|||
</dependency>
|
||||
|
||||
<!-- Tests -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.druid</groupId>
|
||||
<artifactId>druid-processing</artifactId>
|
||||
|
@ -117,11 +148,6 @@
|
|||
<type>test-jar</type>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
|
|
|
@ -62,9 +62,8 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.util.List;

public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest

@ -81,13 +80,13 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
      final QueryRunnerFactoryConglomerate conglomerate,
      final JoinableFactoryWrapper joinableFactory,
      final Injector injector
  ) throws IOException
  )
  {
    ComplexMetrics.registerSerde(VarianceSerde.TYPE_NAME, new VarianceSerde());

    final QueryableIndex index =
        IndexBuilder.create(CalciteTests.getJsonMapper().registerModules(new DruidStatsModule().getJacksonModules()))
                    .tmpDir(temporaryFolder.newFolder())
                    .tmpDir(newTempFolder())
                    .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
                    .schema(
                        new IncrementalIndexSchema.Builder()
|
|
@ -125,6 +125,31 @@
|
|||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>nl.jqno.equalsverifier</groupId>
|
||||
<artifactId>equalsverifier</artifactId>
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.junit.Test;
import org.junit.jupiter.api.Test;

public class SleepSqlTest extends BaseCalciteQueryTest
{
|
|
@ -62,7 +62,6 @@ import org.joda.time.Interval;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.internal.matchers.ThrowableMessageMatcher;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
|
@ -83,7 +82,6 @@ import java.util.stream.IntStream;
|
|||
import static org.easymock.EasyMock.expect;
|
||||
import static org.easymock.EasyMock.mock;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class ParallelIndexSupervisorTaskTest
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -58,17 +58,14 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class PartialDimensionCardinalityTaskTest
|
||||
{
|
||||
private static final ObjectMapper OBJECT_MAPPER = ParallelIndexTestingFactory.createObjectMapper();
|
||||
|
|
|
@ -54,10 +54,8 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
@ -68,7 +66,6 @@ import java.util.function.Supplier;
|
|||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class PartialDimensionDistributionTaskTest
|
||||
{
|
||||
private static final ObjectMapper OBJECT_MAPPER = ParallelIndexTestingFactory.createObjectMapper();
|
||||
|
|
|
@ -33,9 +33,7 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
@ -45,7 +43,6 @@ import java.util.StringJoiner;
|
|||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class StringSketchTest
|
||||
{
|
||||
private static final int FACTOR = 2;
|
||||
|
@ -140,7 +137,6 @@ public class StringSketchTest
|
|||
}
|
||||
}
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public static class PartitionTest
|
||||
{
|
||||
private static final StringSketch SKETCH;
|
||||
|
|
|
@ -29,14 +29,11 @@ import org.joda.time.DateTime;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class DefaultIndexTaskInputRowIteratorBuilderTest
|
||||
{
|
||||
public static class BuildTest
|
||||
|
|
pom.xml

@ -1056,7 +1056,7 @@
      <dependency>
        <groupId>org.junit</groupId>
        <artifactId>junit-bom</artifactId>
        <version>5.10.0</version>
        <version>5.10.2</version>
        <type>pom</type>
        <scope>import</scope>
      </dependency>

@ -1347,6 +1347,32 @@

    <build>
      <plugins>
        <plugin>
          <groupId>org.openrewrite.maven</groupId>
          <artifactId>rewrite-maven-plugin</artifactId>
          <version>5.23.1</version>
          <configuration>
            <activeRecipes>
              <recipe>org.apache.druid.RewriteRules</recipe>
            </activeRecipes>
            <configLocation>${maven.multiModuleProjectDirectory}/rewrite.yml</configLocation>
            <checkstyleDetectionEnabled>false</checkstyleDetectionEnabled>
            <failOnDryRunResults>true</failOnDryRunResults>
            <pomCacheEnabled>false</pomCacheEnabled>
            <skipMavenParsing>true</skipMavenParsing>
            <exclusions>
              <exclude>**/*.json</exclude>
              <exclude>**/*.yaml</exclude>
            </exclusions>
          </configuration>
          <dependencies>
            <dependency>
              <groupId>org.openrewrite.recipe</groupId>
              <artifactId>rewrite-testing-frameworks</artifactId>
              <version>2.4.1</version>
            </dependency>
          </dependencies>
        </plugin>
        <plugin>
          <groupId>org.jacoco</groupId>
          <artifactId>jacoco-maven-plugin</artifactId>

@ -1573,6 +1599,7 @@
            </goals>
            <configuration>
              <rules>
                <banDuplicatePomDependencyVersions/>
                <requireMavenVersion>
                  <version>3.0.0</version>
                </requireMavenVersion>

@ -1709,7 +1736,7 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>3.1.2</version>
          <version>3.2.5</version>
          <configuration>
            <!-- locale settings must be set on the command line before startup -->
            <!-- set default options -->

@ -1741,6 +1768,11 @@
            <!-- our tests are very verbose, let's keep the volume down -->
            <redirectTestOutputToFile>true</redirectTestOutputToFile>
            <forkNode implementation="org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory"/>
            <!-- Surefire excludes static inner classes by default, which may cause static inner test classes to be skipped;
                 https://maven.apache.org/surefire/maven-surefire-plugin/test-mojo.html#excludes -->
            <excludes>
              <exclude/>
            </excludes>
          </configuration>
        </plugin>
        <plugin>
@ -25,8 +25,6 @@ import org.apache.druid.java.util.common.parsers.CloseableIterator;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -41,7 +39,6 @@ import static org.mockito.ArgumentMatchers.any;
|
|||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class HandlingInputRowIteratorTest
|
||||
{
|
||||
public static class AbsentRowTest
|
||||
|
|
|
@ -45,7 +45,6 @@ import org.junit.Before;
|
|||
import org.junit.BeforeClass;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.internal.matchers.ThrowableMessageMatcher;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
|
@ -61,7 +60,6 @@ import java.nio.channels.Channels;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class FrameTest
|
||||
{
|
||||
// Tests that use good frames built from a standard test file.
|
||||
|
|
|
@ -36,7 +36,6 @@ import org.hamcrest.CoreMatchers;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
|
@ -52,7 +51,6 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class ReadableByteChunksFrameChannelTest
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -58,7 +58,6 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.internal.matchers.ThrowableMessageMatcher;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
|
@ -84,7 +83,6 @@ import java.util.concurrent.TimeUnit;
|
|||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class FrameProcessorExecutorTest
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -62,7 +62,6 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
@ -79,7 +78,6 @@ import java.util.List;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class SuperSorterTest
|
||||
{
|
||||
private static final Logger log = new Logger(SuperSorterTest.class);
|
||||
|
|
|
@ -54,7 +54,6 @@ import org.junit.Assert;
|
|||
import org.junit.Assume;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -65,7 +64,6 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class FrameStorageAdapterTest
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -32,8 +32,6 @@ import com.google.inject.Injector;
|
|||
import com.google.inject.Module;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.validation.Validation;
|
||||
|
@ -43,7 +41,6 @@ import java.util.Map;
|
|||
import java.util.Objects;
|
||||
import java.util.Properties;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class DruidSecondaryModuleTest
|
||||
{
|
||||
private static final String PROPERTY_NAME = "druid.injected.val";
|
||||
|
|
|
@ -25,7 +25,6 @@ import com.google.common.collect.Lists;
|
|||
import com.google.common.collect.Ordering;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -37,7 +36,6 @@ import java.util.List;
|
|||
import java.util.Random;
|
||||
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class TopNSequenceTest
|
||||
{
|
||||
private static final List<String> EMPTY = Collections.emptyList();
|
||||
|
|
|
@ -29,10 +29,7 @@ import org.joda.time.Duration;
|
|||
import org.joda.time.Period;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class TaskLookupTest
|
||||
{
|
||||
public static class CompleteTaskLookupTest
|
||||
|
|
|
@ -34,10 +34,7 @@ import org.apache.druid.query.timeseries.TimeseriesQuery;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class DruidsTest
|
||||
{
|
||||
private static final String DATASOURCE = "datasource";
|
||||
|
|
|
@ -31,7 +31,6 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
@ -39,7 +38,6 @@ import org.junit.runners.Parameterized;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class GroupingAggregatorFactoryTest
|
||||
{
|
||||
public static GroupingAggregatorFactory makeFactory(String[] groupings, @Nullable String[] keyDims)
|
||||
|
|
|
@ -43,7 +43,6 @@ import org.joda.time.Interval;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
@ -57,7 +56,6 @@ import java.util.stream.Collectors;
|
|||
import java.util.stream.IntStream;
|
||||
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class ScanQueryRunnerFactoryTest
|
||||
{
|
||||
private static final ScanQueryConfig CONFIG = new ScanQueryConfig()
|
||||
|
|
|
@ -37,7 +37,6 @@ import org.junit.After;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -46,7 +45,6 @@ import java.nio.ByteBuffer;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class QueryableIndexStorageAdapterTest
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -24,7 +24,6 @@ import com.google.common.primitives.Ints;
|
|||
import org.apache.druid.java.util.common.StringUtils;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -35,7 +34,6 @@ import java.util.Arrays;
|
|||
import java.util.Collection;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class VSizeLongSerdeTest
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -42,14 +42,12 @@ import org.junit.AfterClass;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.Arrays;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class ArrayContainsElementFilterTests
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -48,14 +48,12 @@ import org.junit.AfterClass;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.Arrays;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class EqualityFilterTests
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -37,7 +37,6 @@ import org.apache.druid.segment.StorageAdapter;
|
|||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -45,7 +44,6 @@ import java.io.Closeable;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class NullFilterTests
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -50,7 +50,6 @@ import org.junit.AfterClass;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Assume;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
||||
|
@ -59,7 +58,6 @@ import java.util.Arrays;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class RangeFilterTests
|
||||
{
|
||||
@RunWith(Parameterized.class)
|
||||
|
|
|
@ -41,9 +41,7 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.MockitoAnnotations;
|
||||
|
@ -52,12 +50,10 @@ import java.util.concurrent.atomic.AtomicLong;
|
|||
import java.util.function.Function;
|
||||
import java.util.function.IntFunction;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class IndexedTableJoinMatcherTest
|
||||
{
|
||||
private static final int SIZE = 3;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public static class ConditionMatcherFactoryTest
|
||||
{
|
||||
public static class MakeLongProcessorTest extends InitializedNullHandlingTest
|
||||
|
|
|
@ -24,15 +24,12 @@ import org.apache.druid.java.util.common.IAE;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.mockito.MockedStatic;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class ConnectionUriUtilsTest
|
||||
{
|
||||
public static class ThrowIfURLHasNotAllowedPropertiesTest
|
||||
|
|
|
@ -26,13 +26,10 @@ import org.apache.druid.metadata.MapStringDynamicConfigProvider;
|
|||
import org.junit.Assert;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class DynamicConfigProviderUtilsTest
|
||||
{
|
||||
public static class ThrowIfURLHasNotAllowedPropertiesTest
|
||||
|
|
|
@ -0,0 +1,157 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
---
|
||||
type: specs.openrewrite.org/v1beta/recipe
|
||||
name: org.apache.druid.RewriteRules
|
||||
recipeList:
|
||||
- org.apache.druid.UpgradeCalciteTestsToJunit5
|
||||
- org.openrewrite.java.testing.junit5.RemoveObsoleteRunners:
|
||||
obsoleteRunners:
|
||||
- org.junit.experimental.runners.Enclosed
|
||||
---
|
||||
type: specs.openrewrite.org/v1beta/recipe
|
||||
name: org.apache.druid.UpgradeCalciteTestsToJunit5
|
||||
preconditions:
|
||||
- org.openrewrite.java.search.FindImplementations:
|
||||
typeName: org.apache.druid.sql.calcite.util.CalciteTestBase
|
||||
recipeList:
|
||||
- org.apache.druid.JUnit4to5Migration
|
||||
- org.openrewrite.staticanalysis.UnnecessaryThrows
|
||||
#- org.openrewrite.java.testing.junit5.StaticImports
|
||||
---
|
||||
type: specs.openrewrite.org/v1beta/recipe
|
||||
name: org.apache.druid.EasyMockRunnerToEasyMockExtension
|
||||
displayName: Replace EasyMock `@RunWith` with `@ExtendWith` usage
|
||||
recipeList:
|
||||
- org.openrewrite.java.testing.junit5.RunnerToExtension:
|
||||
runners:
|
||||
- org.easymock.EasyMockRunner
|
||||
extension: org.easymock.EasyMockExtension
|
||||
---
|
||||
# Customized version of org.openrewrite.java.testing.junit5.JUnit4to5Migration
|
||||
# commented lines are recipes which were disabled
|
||||
type: specs.openrewrite.org/v1beta/recipe
|
||||
name: org.apache.druid.JUnit4to5Migration
|
||||
displayName: Custom JUnit Jupiter migration from JUnit 4.x
|
||||
description: Migrates JUnit 4.x tests to JUnit Jupiter.
|
||||
tags:
|
||||
- junit
|
||||
- testing
|
||||
recipeList:
|
||||
- org.openrewrite.java.testing.junit5.UseWiremockExtension
|
||||
- org.openrewrite.java.testing.junit5.IgnoreToDisabled
|
||||
- org.openrewrite.java.testing.junit5.ThrowingRunnableToExecutable
|
||||
- org.openrewrite.java.testing.junit5.RemoveObsoleteRunners:
|
||||
obsoleteRunners: [org.junit.runners.JUnit4, org.junit.runners.BlockJUnit4ClassRunner]
|
||||
- org.openrewrite.maven.RemovePluginDependency:
|
||||
pluginGroupId: org.apache.maven.plugins
|
||||
pluginArtifactId: maven-surefire-plugin
|
||||
groupId: org.apache.maven.surefire
|
||||
artifactId: surefire-junit*
|
||||
- org.openrewrite.java.testing.junit5.UseHamcrestAssertThat
|
||||
- org.openrewrite.java.testing.junit5.MigrateAssumptions
|
||||
- org.openrewrite.java.testing.junit5.UseMockitoExtension
|
||||
- org.openrewrite.java.testing.junit5.UseTestMethodOrder
|
||||
- org.openrewrite.java.testing.junit5.MigrateJUnitTestCase
|
||||
- org.openrewrite.java.ChangeMethodName:
|
||||
methodPattern: org.junit.Assert assertEquals(.., Object[], Object[])
|
||||
newMethodName: assertArrayEquals
|
||||
#- org.openrewrite.java.testing.junit5.AssertToAssertions
|
||||
- org.openrewrite.java.testing.junit5.CategoryToTag
|
||||
- org.openrewrite.java.testing.junit5.CleanupJUnitImports
|
||||
- org.openrewrite.java.testing.junit5.TemporaryFolderToTempDir
|
||||
- org.openrewrite.java.testing.junit5.TempDirNonFinal
|
||||
- org.openrewrite.java.testing.junit5.TestRuleToTestInfo
|
||||
- org.openrewrite.java.testing.junit5.UpdateBeforeAfterAnnotations
|
||||
- org.openrewrite.java.testing.junit5.UpdateTestAnnotation
|
||||
- org.openrewrite.java.testing.junit5.AddMissingTestBeforeAfterAnnotations
|
||||
- org.openrewrite.java.testing.junit5.ParameterizedRunnerToParameterized
|
||||
- org.openrewrite.java.testing.junit5.JUnitParamsRunnerToParameterized
|
||||
- org.apache.druid.EasyMockRunnerToEasyMockExtension
|
||||
- org.openrewrite.java.testing.junit5.ExpectedExceptionToAssertThrows
|
||||
- org.openrewrite.java.testing.junit5.UpdateMockWebServer
|
||||
- org.openrewrite.java.testing.junit5.VertxUnitToVertxJunit5
|
||||
- org.openrewrite.java.testing.junit5.EnclosedToNested
|
||||
- org.openrewrite.java.testing.junit5.AddMissingNested
|
||||
- org.openrewrite.java.testing.hamcrest.AddHamcrestIfUsed
|
||||
- org.openrewrite.java.testing.junit5.UseXMLUnitLegacy
|
||||
- org.openrewrite.java.dependencies.RemoveDependency:
|
||||
groupId: junit
|
||||
artifactId: junit
|
||||
- org.openrewrite.maven.ExcludeDependency:
|
||||
groupId: junit
|
||||
artifactId: junit
|
||||
- org.openrewrite.maven.RemoveExclusion:
|
||||
groupId: org.testcontainers
|
||||
artifactId: testcontainers
|
||||
exclusionGroupId: junit
|
||||
exclusionArtifactId: junit
|
||||
- org.openrewrite.maven.RemoveExclusion:
|
||||
groupId: org.springframework.boot
|
||||
artifactId: spring-boot-testcontainers
|
||||
exclusionGroupId: junit
|
||||
exclusionArtifactId: junit
|
||||
- org.openrewrite.java.dependencies.RemoveDependency:
|
||||
groupId: org.junit.vintage
|
||||
artifactId: junit-vintage-engine
|
||||
- org.openrewrite.maven.ExcludeDependency:
|
||||
groupId: org.junit.vintage
|
||||
artifactId: junit-vintage-engine
|
||||
- org.openrewrite.java.dependencies.AddDependency:
|
||||
groupId: org.junit.jupiter
|
||||
artifactId: junit-jupiter
|
||||
version: 5.x
|
||||
onlyIfUsing: org.junit.Test
|
||||
scope: test
|
||||
- org.openrewrite.java.dependencies.AddDependency:
|
||||
groupId: org.junit.jupiter
|
||||
artifactId: junit-jupiter
|
||||
version: 5.x
|
||||
onlyIfUsing: org.junit.jupiter.api.Test
|
||||
scope: test
|
||||
acceptTransitive: true
|
||||
- org.openrewrite.java.dependencies.AddDependency:
|
||||
groupId: org.junit.jupiter
|
||||
artifactId: junit-jupiter-api
|
||||
version: 5.x
|
||||
onlyIfUsing: org.junit.Test
|
||||
scope: test
|
||||
- org.openrewrite.java.dependencies.AddDependency:
|
||||
groupId: org.junit.jupiter
|
||||
artifactId: junit-jupiter-api
|
||||
version: 5.x
|
||||
onlyIfUsing: org.junit.jupiter.api.Test
|
||||
scope: test
|
||||
acceptTransitive: true
|
||||
- org.openrewrite.java.dependencies.AddDependency:
|
||||
groupId: org.junit.jupiter
|
||||
artifactId: junit-jupiter-params
|
||||
version: 5.x
|
||||
onlyIfUsing: org.junit.jupiter.params.ParameterizedTest
|
||||
scope: test
|
||||
acceptTransitive: true
|
||||
- org.openrewrite.java.dependencies.UpgradeDependencyVersion:
|
||||
groupId: org.mockito
|
||||
artifactId: "*"
|
||||
newVersion: 3.x
|
||||
overrideManagedVersion: false
|
||||
- org.openrewrite.maven.UpgradePluginVersion:
|
||||
groupId: org.apache.maven.plugins
|
||||
artifactId: maven-surefire-plugin
|
||||
newVersion: 2.22.x
|
||||
- org.openrewrite.maven.UpgradePluginVersion:
|
||||
groupId: org.apache.maven.plugins
|
||||
artifactId: maven-failsafe-plugin
|
||||
newVersion: 2.22.x
|
|
@ -328,6 +328,31 @@
|
|||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-api</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-migrationsupport</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
<artifactId>junit-jupiter-params</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.junit.vintage</groupId>
|
||||
<artifactId>junit-vintage-engine</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-core</artifactId>
|
||||
|
|
|
@ -39,7 +39,6 @@ import org.apache.druid.query.QueryUnsupportedException;
|
|||
import org.apache.druid.query.ResourceLimitExceededException;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.experimental.runners.Enclosed;
|
||||
import org.junit.rules.ExpectedException;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.junit.runners.Parameterized;
|
||||
|
@ -54,7 +53,6 @@ import java.util.concurrent.ExecutorService;
|
|||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
@RunWith(Enclosed.class)
|
||||
public class JsonParserIteratorTest
|
||||
{
|
||||
private static final JavaType JAVA_TYPE = Mockito.mock(JavaType.class);
|
||||
|
|
|
@ -25,6 +25,9 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.metadata.storage.derby.DerbyConnector;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.rules.ExternalResource;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.exceptions.UnableToObtainConnectionException;

@ -190,4 +193,20 @@ public class TestDerbyConnector extends DerbyConnector
          .toUpperCase(Locale.ENGLISH);
    }
  }

  public static class DerbyConnectorRule5 extends DerbyConnectorRule implements BeforeAllCallback, AfterAllCallback
  {

    @Override
    public void beforeAll(ExtensionContext context)
    {
      before();
    }

    @Override
    public void afterAll(ExtensionContext context)
    {
      after();
    }
  }
}
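DerbyConnectorRule5 adapts the existing JUnit 4 rule to Jupiter by implementing BeforeAllCallback and AfterAllCallback. One way such a callback-based extension is typically attached is @RegisterExtension on a static field; the sketch below uses a stand-in extension rather than Druid's real connector, so all names are illustrative:

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.RegisterExtension;

class DerbyStyleExtensionExampleTest
{
  // Stand-in for a rule-turned-extension such as DerbyConnectorRule5: runs once
  // before all tests in the class and once after the last one.
  static class FakeConnectorExtension implements BeforeAllCallback, AfterAllCallback
  {
    boolean started;

    @Override
    public void beforeAll(ExtensionContext context)
    {
      started = true;   // a real extension would create the Derby schema here
    }

    @Override
    public void afterAll(ExtensionContext context)
    {
      started = false;  // ... and drop it / close connections here
    }
  }

  // Static registration is required for class-level (before/after all) callbacks.
  @RegisterExtension
  static FakeConnectorExtension connector = new FakeConnectorExtension();

  @Test
  void connectorIsAvailableDuringTests()
  {
    Assertions.assertTrue(connector.started);
  }
}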
sql/pom.xml

@ -203,8 +203,8 @@
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.vintage</groupId>
      <artifactId>junit-vintage-engine</artifactId>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-migrationsupport</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>

@ -212,6 +212,17 @@
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.junit.vintage</groupId>
      <artifactId>junit-vintage-engine</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.opentest4j</groupId>
      <artifactId>opentest4j</artifactId>
      <version>1.3.0</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
@ -33,8 +33,8 @@ import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.stream.Collectors;
import java.util.stream.IntStream;

@ -43,7 +43,7 @@ public class SqlRowTransformerTest extends CalciteTestBase
{
  private RelDataType rowType;

  @Before
  @BeforeEach
  public void setup()
  {
    final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(DruidTypeSystem.INSTANCE);
|
@ -58,7 +58,6 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory;
|
|||
import org.apache.druid.sql.calcite.planner.PrepareResult;
|
||||
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
|
||||
import org.apache.druid.sql.calcite.util.CalciteTests;
|
||||
import org.apache.druid.sql.calcite.util.QueryLogHook;
|
||||
import org.apache.druid.sql.http.SqlQuery;
|
||||
import org.easymock.EasyMock;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
|
@ -68,7 +67,6 @@ import org.junit.Assert;
|
|||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
|
||||
|
@ -92,8 +90,6 @@ public class SqlStatementTest
|
|||
private static Closer resourceCloser;
|
||||
@ClassRule
|
||||
public static TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
@Rule
|
||||
public QueryLogHook queryLogHook = QueryLogHook.create();
|
||||
private TestRequestLogger testRequestLogger;
|
||||
private ListeningExecutorService executorService;
|
||||
private SqlStatementFactory sqlStatementFactory;
|
||||
|
|
|
@ -88,24 +88,23 @@ import org.apache.druid.sql.calcite.schema.DruidSchemaName;
|
|||
import org.apache.druid.sql.calcite.schema.NamedSchema;
|
||||
import org.apache.druid.sql.calcite.util.CalciteTestBase;
|
||||
import org.apache.druid.sql.calcite.util.CalciteTests;
|
||||
import org.apache.druid.sql.calcite.util.QueryLogHook;
|
||||
import org.apache.druid.sql.guice.SqlModule;
|
||||
import org.eclipse.jetty.server.Server;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.ClassRule;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.Timeout;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.skife.jdbi.v2.DBI;
|
||||
import org.skife.jdbi.v2.Handle;
|
||||
import org.skife.jdbi.v2.ResultIterator;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.Array;
|
||||
import java.sql.Connection;
|
||||
|
@ -129,6 +128,7 @@ import java.util.Set;
|
|||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Tests the Avatica-based JDBC implementation using JSON serialization. See
|
||||
|
@ -153,33 +153,27 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
|
|||
|
||||
private static final String DUMMY_SQL_QUERY_ID = "dummy";
|
||||
|
||||
@ClassRule
|
||||
public static TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
|
||||
private static QueryRunnerFactoryConglomerate conglomerate;
|
||||
private static SpecificSegmentsQuerySegmentWalker walker;
|
||||
private static Closer resourceCloser;
|
||||
|
||||
private final boolean nullNumeric = !NullHandling.replaceWithDefault();
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception
|
||||
@BeforeAll
|
||||
public static void setUpClass(@TempDir File tempDir)
|
||||
{
|
||||
resourceCloser = Closer.create();
|
||||
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
|
||||
walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder());
|
||||
walker = CalciteTests.createMockWalker(conglomerate, tempDir);
|
||||
resourceCloser.register(walker);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
@AfterAll
|
||||
public static void tearDownClass() throws IOException
|
||||
{
|
||||
resourceCloser.close();
|
||||
}
|
||||
|
||||
@Rule
|
||||
public QueryLogHook queryLogHook = QueryLogHook.create();
|
||||
|
||||
private final PlannerConfig plannerConfig = new PlannerConfig();
|
||||
private final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
|
||||
private final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();
|
||||
|
@@ -266,7 +260,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
);
}

@Before
@BeforeEach
public void setUp() throws Exception
{
final DruidSchemaCatalog rootSchema = makeRootSchema();

@@ -328,7 +322,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
clientLosAngeles = DriverManager.getConnection(server.url, propertiesLosAngeles);
}

@After
@AfterEach
public void tearDown() throws Exception
{
if (server != null) {

@@ -858,49 +852,45 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
);
}

@Test(timeout = 90_000L)
@Test
@Timeout(value = 90_000L, unit = TimeUnit.MILLISECONDS)
public void testConcurrentQueries()
{
queryLogHook.withSkippedLog(
v -> {
final List<ListenableFuture<Integer>> futures = new ArrayList<>();
final ListeningExecutorService exec = MoreExecutors.listeningDecorator(
Execs.multiThreaded(AVATICA_CONFIG.getMaxStatementsPerConnection(), "DruidAvaticaHandlerTest-%d")
);
for (int i = 0; i < 2000; i++) {
final String query = StringUtils.format("SELECT COUNT(*) + %s AS ci FROM foo", i);
futures.add(
exec.submit(() -> {
try (
final Statement statement = client.createStatement();
final ResultSet resultSet = statement.executeQuery(query)
) {
final List<Map<String, Object>> rows = getRows(resultSet);
return ((Number) Iterables.getOnlyElement(rows).get("ci")).intValue();
}
catch (SQLException e) {
throw new RuntimeException(e);
}
})
);
}

final List<Integer> integers;
try {
integers = Futures.allAsList(futures).get();
}
catch (InterruptedException e) {
throw new RE(e);
}
catch (ExecutionException e) {
throw new RE(e);
}
for (int i = 0; i < 2000; i++) {
Assert.assertEquals(i + 6, (int) integers.get(i));
}
exec.shutdown();
}
final List<ListenableFuture<Integer>> futures = new ArrayList<>();
final ListeningExecutorService exec = MoreExecutors.listeningDecorator(
Execs.multiThreaded(AVATICA_CONFIG.getMaxStatementsPerConnection(), "DruidAvaticaHandlerTest-%d")
);
for (int i = 0; i < 2000; i++) {
final String query = StringUtils.format("SELECT COUNT(*) + %s AS ci FROM foo", i);
futures.add(
exec.submit(() -> {
try (
final Statement statement = client.createStatement();
final ResultSet resultSet = statement.executeQuery(query)) {
final List<Map<String, Object>> rows = getRows(resultSet);
return ((Number) Iterables.getOnlyElement(rows).get("ci")).intValue();
}
catch (SQLException e) {
throw new RuntimeException(e);
}
})
);
}

final List<Integer> integers;
try {
integers = Futures.allAsList(futures).get();
}
catch (InterruptedException e) {
throw new RE(e);
}
catch (ExecutionException e) {
throw new RE(e);
}
for (int i = 0; i < 2000; i++) {
Assert.assertEquals(i + 6, (int) integers.get(i));
}
exec.shutdown();
}
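Jupiter's @Test has no timeout attribute, so the hunk above moves the 90-second limit into a separate @Timeout annotation. A small sketch of the replacement idiom (hypothetical test class, not Druid code):

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

import java.util.concurrent.TimeUnit;

class TimeoutSketch
{
  // Equivalent of JUnit 4's @Test(timeout = 90_000L): the test fails if it runs
  // longer than 90 seconds.
  @Test
  @Timeout(value = 90_000L, unit = TimeUnit.MILLISECONDS)
  void finishesWithinTheLimit() throws InterruptedException
  {
    Thread.sleep(10);
  }
}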

@Test
@@ -47,17 +47,15 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;

@@ -73,26 +71,20 @@ public class DruidStatementTest extends CalciteTestBase
private static String SELECT_STAR_FROM_FOO =
"SELECT * FROM druid.foo";

@ClassRule
public static TemporaryFolder temporaryFolder = new TemporaryFolder();

@Rule
public QueryLogHook queryLogHook = QueryLogHook.create();

private static SpecificSegmentsQuerySegmentWalker walker;
private static QueryRunnerFactoryConglomerate conglomerate;
private static Closer resourceCloser;

@BeforeClass
public static void setUpClass() throws Exception
@BeforeAll
public static void setUpClass(@TempDir File tempDir)
{
resourceCloser = Closer.create();
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder());
walker = CalciteTests.createMockWalker(conglomerate, tempDir);
resourceCloser.register(walker);
}

@AfterClass
@AfterAll
public static void tearDownClass() throws IOException
{
resourceCloser.close();

@@ -100,7 +92,7 @@ public class DruidStatementTest extends CalciteTestBase

private SqlStatementFactory sqlStatementFactory;

@Before
@BeforeEach
public void setUp()
{
final PlannerConfig plannerConfig = new PlannerConfig();

@@ -128,7 +120,7 @@ public class DruidStatementTest extends CalciteTestBase
);
}

@After
@AfterEach
public void tearDown()
{
@@ -26,9 +26,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import com.google.inject.Injector;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.druid.annotations.UsedByJUnitParamsRunner;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.error.DruidException;
import org.apache.druid.error.DruidException.Category;

@@ -38,6 +38,7 @@ import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.hll.VersionOneHyperLogLogCollector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.io.Closer;

@@ -104,7 +105,6 @@ import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaManager;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder;
import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;

@@ -116,20 +116,21 @@ import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.sql.http.SqlParameter;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.rules.TemporaryFolder;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.extension.RegisterExtension;

import javax.annotation.Nullable;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

@@ -142,10 +143,11 @@ import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

/**
* A base class for SQL query testing. It sets up query execution environment, provides useful helper methods,
@@ -160,7 +162,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public static Long NULL_LONG;
public static final String HLLC_STRING = VersionOneHyperLogLogCollector.class.getName();

@BeforeClass
@BeforeAll
public static void setupNullValues()
{
NULL_STRING = NullHandling.defaultStringValue();

@@ -296,14 +298,9 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public final SqlEngine engine0;
final boolean useDefault = NullHandling.replaceWithDefault();

@Rule(order = 2)
public TemporaryFolder temporaryFolder = new TemporaryFolder();

public boolean cannotVectorize = false;
public boolean skipVectorize = false;

public QueryLogHook queryLogHook;

private QueryComponentSupplier baseComponentSupplier;
public PlannerComponentSupplier basePlannerComponentSupplier = new StandardPlannerComponentSupplier();

@@ -630,22 +627,8 @@ public class BaseCalciteQueryTest extends CalciteTestBase
return DruidExceptionMatcher.invalidSqlInput().expectMessageContains(s);
}

@Rule
public QueryLogHook getQueryLogHook()
{
// Indirection for the JSON mapper. Otherwise, this rule method is called
// before Setup is called, causing the query framework to be built before
// tests have done their setup. The indirection means we access the query
// framework only when we log the first query. By then, the query framework
// will have been created via the normal path.
return queryLogHook = new QueryLogHook(() -> queryFramework().queryJsonMapper());
}

@ClassRule
public static SqlTestFrameworkConfig.ClassRule queryFrameworkClassRule = new SqlTestFrameworkConfig.ClassRule();

@Rule(order = 3)
public SqlTestFrameworkConfig.MethodRule queryFrameworkRule = queryFrameworkClassRule.methodRule(this);
@RegisterExtension
static SqlTestFrameworkConfig.Rule queryFrameworkRule = new SqlTestFrameworkConfig.Rule();

public SqlTestFramework queryFramework()
{
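The @ClassRule/@Rule pair above collapses into a single statically registered Jupiter extension. SqlTestFrameworkConfig.Rule is Druid's own extension; the sketch below only illustrates the general @RegisterExtension mechanism, with a hypothetical callback-based extension standing in for it:

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.RegisterExtension;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class RegisterExtensionSketch
{
  // Hypothetical stand-in for SqlTestFrameworkConfig.Rule: one programmatically
  // registered extension replaces the old @ClassRule / @Rule pair.
  static class FrameworkExtension implements BeforeAllCallback, AfterAllCallback
  {
    String framework;

    @Override
    public void beforeAll(ExtensionContext context)
    {
      framework = "initialized before any test in the class";
    }

    @Override
    public void afterAll(ExtensionContext context)
    {
      framework = null;
    }
  }

  @RegisterExtension
  static FrameworkExtension queryFrameworkRule = new FrameworkExtension();

  @Test
  void extensionWasInitialized()
  {
    assertNotNull(queryFrameworkRule.framework);
  }
}

Registering the extension on a static field gives it class-level callbacks, roughly what the old @ClassRule provided, while an instance field would scope it per test method like the old @Rule.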
@@ -657,7 +640,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
)
{
return baseComponentSupplier.createQuerySegmentWalker(conglomerate, joinableFactory, injector);
}

@@ -679,14 +662,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
@Override
public void gatherProperties(Properties properties)
{
try {
baseComponentSupplier = new StandardComponentSupplier(
temporaryFolder.newFolder()
);
}
catch (IOException e) {
throw new RuntimeException(e);
}
baseComponentSupplier = new StandardComponentSupplier(newTempFolder());
baseComponentSupplier.gatherProperties(properties);
}

@@ -753,7 +729,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public void assumeFeatureAvailable(EngineFeature feature)
{
boolean featureAvailable = queryFramework().engine().featureAvailable(feature);
assumeTrue(StringUtils.format("test disabled; feature [%s] is not available!", feature), featureAvailable);
assumeTrue(featureAvailable, StringUtils.format("test disabled; feature [%s] is not available!", feature));
}

public void assertQueryIsUnplannable(final String sql, String expectedError)

@@ -767,7 +743,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
testQuery(plannerConfig, sql, CalciteTests.REGULAR_USER_AUTH_RESULT, ImmutableList.of(), ImmutableList.of());
}
catch (DruidException e) {
MatcherAssert.assertThat(
assertThat(
e,
buildUnplannableExceptionMatcher().expectMessageContains(expectedError)
);

@@ -1013,12 +989,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
);
}

@Override
public QueryLogHook queryLogHook()
{
return queryLogHook;
}

@Override
public PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{

@@ -1255,7 +1225,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
.build()
.run()
);
MatcherAssert.assertThat(e, exceptionMatcher);
assertThat(e, exceptionMatcher);
}

public void analyzeResources(

@@ -1339,7 +1309,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase

protected void msqIncompatible()
{
assumeFalse("test case is not MSQ compatible", testBuilder().config.isRunningMSQ());
assumeFalse(testBuilder().config.isRunningMSQ(), "test case is not MSQ compatible");
}

protected static boolean isRewriteJoinToFilter(final Map<String, Object> queryContext)
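The two assumption hunks above swap argument order because JUnit 4's Assume.assumeTrue(message, condition) takes the message first, while Jupiter's Assumptions.assumeTrue(condition, message) takes it last. A tiny sketch, using a hypothetical system property purely for illustration:

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assumptions.assumeTrue;

class AssumptionOrderSketch
{
  @Test
  void runsOnlyWhenTheFeatureIsAvailable()
  {
    // Hypothetical flag; in the real tests the condition comes from the query framework.
    boolean featureAvailable = Boolean.getBoolean("sketch.feature.available");

    // Jupiter signature: condition first, message second (JUnit 4 had it the other way around).
    assumeTrue(featureAvailable, "test disabled; feature is not available");
  }
}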
@@ -1392,59 +1362,56 @@ public class BaseCalciteQueryTest extends CalciteTestBase
* It tests various configs that can be passed to join queries. All the configs provided by this provider should
* have the join query engine return the same results.
*/
public static class QueryContextForJoinProvider
public static Object[] provideQueryContexts()
{
@UsedByJUnitParamsRunner
public static Object[] provideQueryContexts()
{
return new Object[]{
// default behavior
QUERY_CONTEXT_DEFAULT,
// all rewrites enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter-on-value-column rewrites disabled, everything else enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites fully disabled, join-to-filter enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites disabled, but value column filters still set to true (it should be ignored and this should
// behave the same as the previous context)
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites fully enabled, join-to-filter disabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, false)
.build(),
// all rewrites disabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, false)
.build(),
};
}
return new Object[] {
// default behavior
QUERY_CONTEXT_DEFAULT,
// all rewrites enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter-on-value-column rewrites disabled, everything else enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites fully disabled, join-to-filter enabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites disabled, but value column filters still set to true
// (it should be ignored and this should
// behave the same as the previous context)
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, true)
.build(),
// filter rewrites fully enabled, join-to-filter disabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, true)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, true)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, false)
.build(),
// all rewrites disabled
new ImmutableMap.Builder<String, Object>()
.putAll(QUERY_CONTEXT_DEFAULT)
.put(QueryContexts.JOIN_FILTER_REWRITE_VALUE_COLUMN_FILTERS_ENABLE_KEY, false)
.put(QueryContexts.JOIN_FILTER_REWRITE_ENABLE_KEY, false)
.put(QueryContexts.REWRITE_JOIN_TO_FILTER_ENABLE_KEY, false)
.build(),
};
}

protected Map<String, Object> withLeftDirectAccessEnabled(Map<String, Object> context)
@@ -1628,4 +1595,25 @@ public class BaseCalciteQueryTest extends CalciteTestBase
sb.append(post);
}
}

/**
* Helper method that copies a resource to a temporary file, then returns it.
*/
public File getResourceAsTemporaryFile(final String resource)
{
final File file = newTempFile("resourceAsTempFile");
final InputStream stream = getClass().getResourceAsStream(resource);

if (stream == null) {
throw new RE(StringUtils.format("No such resource [%s]", resource));
}

try {
ByteStreams.copy(stream, Files.newOutputStream(file.toPath()));
}
catch (IOException e) {
throw new RuntimeException(e);
}
return file;
}
}
@@ -71,7 +71,7 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Assert;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.Collections;
@@ -20,8 +20,6 @@
package org.apache.druid.sql.calcite;

import com.google.common.collect.ImmutableList;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.AllGranularity;
import org.apache.druid.java.util.common.granularity.Granularities;

@@ -45,18 +43,16 @@ import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.join.JoinType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;

@RunWith(JUnitParamsRunner.class)
public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
{
@Test
@Parameters(source = QueryContextForJoinProvider.class)
@MethodSource("provideQueryContexts")
@ParameterizedTest(name = "{0}")
public void testCorrelatedSubquery(Map<String, Object> queryContext)
{
cannotVectorize();

@@ -172,8 +168,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
);
}

@Test
@Parameters(source = QueryContextForJoinProvider.class)
@MethodSource("provideQueryContexts")
@ParameterizedTest(name = "{0}")
public void testCorrelatedSubqueryWithLeftFilter(Map<String, Object> queryContext)
{
cannotVectorize();

@@ -261,8 +257,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
);
}

@Test
@Parameters(source = QueryContextForJoinProvider.class)
@MethodSource("provideQueryContexts")
@ParameterizedTest(name = "{0}")
public void testCorrelatedSubqueryWithLeftFilter_leftDirectAccessDisabled(Map<String, Object> queryContext)
{
cannotVectorize();

@@ -356,8 +352,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
);
}

@Test
@Parameters(source = QueryContextForJoinProvider.class)
@MethodSource("provideQueryContexts")
@ParameterizedTest(name = "{0}")
public void testCorrelatedSubqueryWithCorrelatedQueryFilter(Map<String, Object> queryContext)
{
cannotVectorize();

@@ -450,8 +446,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
);
}

@Test
@Parameters(source = QueryContextForJoinProvider.class)
@MethodSource("provideQueryContexts")
@ParameterizedTest(name = "{0}")
public void testCorrelatedSubqueryWithCorrelatedQueryFilter_Scan(Map<String, Object> queryContext)
{
cannotVectorize();
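Together with the earlier flattening of QueryContextForJoinProvider into a plain provideQueryContexts() method, the hunks above show the JUnitParams replacement: @RunWith(JUnitParamsRunner.class) and @Parameters(source = ...) become @ParameterizedTest with @MethodSource("provideQueryContexts"). A self-contained sketch of that pattern with a hypothetical provider and trivial contexts:

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.Collections;
import java.util.Map;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class MethodSourceSketch
{
  // Hypothetical provider standing in for provideQueryContexts(): a static method
  // whose returned values each become one invocation of the parameterized test.
  static Object[] provideQueryContexts()
  {
    return new Object[]{
        Collections.emptyMap(),
        Collections.singletonMap("enableJoinFilterRewrite", true),
        Collections.singletonMap("enableJoinFilterRewrite", false)
    };
  }

  @MethodSource("provideQueryContexts")
  @ParameterizedTest(name = "{0}")
  void testWithQueryContext(Map<String, Object> queryContext)
  {
    assertNotNull(queryContext);
  }
}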
@@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.util.HashMap;
import java.util.Map;
@@ -44,9 +44,9 @@ import org.apache.druid.storage.StorageConnector;
import org.apache.druid.storage.local.LocalFileExportStorageProvider;
import org.apache.druid.storage.local.LocalFileStorageConnectorProvider;
import org.hamcrest.CoreMatchers;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.util.Collections;
import java.util.List;

@@ -80,7 +80,7 @@ public class CalciteExportTest extends CalciteIngestionDmlTest

// Disabled until replace supports external destinations. To be enabled after that point.
@Test
@Ignore
@Disabled
public void testReplaceIntoExtern()
{
testIngestionQuery()

@@ -207,7 +207,7 @@ public class CalciteExportTest extends CalciteIngestionDmlTest

// Disabled until replace supports external destinations. To be enabled after that point.
@Test
@Ignore
@Disabled
public void testReplaceIntoExternParameterized()
{
testIngestionQuery()
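@Ignore maps one-to-one onto Jupiter's @Disabled, as in the two hunks above. A minimal sketch reusing the reason given in the source comment:

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

class DisabledSketch
{
  // Direct replacement for JUnit 4's @Ignore; the reason string is optional but useful.
  @Test
  @Disabled("until replace supports external destinations")
  void testReplaceIntoExtern()
  {
    // Intentionally empty: the body is skipped while the test is disabled.
  }
}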
@@ -65,10 +65,9 @@ import org.apache.druid.sql.guice.SqlBindings;
import org.apache.druid.sql.http.SqlParameter;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.junit.After;
import org.junit.Assert;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.AfterEach;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@@ -82,6 +81,8 @@ import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;

import static org.hamcrest.MatcherAssert.assertThat;

public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
{
protected static final Map<String, Object> DEFAULT_CONTEXT =

@@ -181,7 +182,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
});
}

@After
@AfterEach
public void tearDown()
{
// Catch situations where tests forgot to call "verify" on their tester.

@@ -378,7 +379,6 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
throw new ISE("Test must not have expectedQuery");
}

queryLogHook.clearRecordedQueries();
final Throwable e = Assert.assertThrows(
Throwable.class,
() -> {

@@ -386,8 +386,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
}
);

MatcherAssert.assertThat(e, validationErrorMatcher);
Assert.assertTrue(queryLogHook.getRecordedQueries().isEmpty());
assertThat(e, validationErrorMatcher);
}

private void verifySuccess()
@@ -52,10 +52,9 @@ import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;

@@ -69,6 +68,7 @@ import static org.apache.druid.segment.column.ColumnType.DOUBLE;
import static org.apache.druid.segment.column.ColumnType.FLOAT;
import static org.apache.druid.segment.column.ColumnType.LONG;
import static org.apache.druid.segment.column.ColumnType.STRING;
import static org.hamcrest.MatcherAssert.assertThat;

public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
{

@@ -1197,7 +1197,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
Assert.fail("Exception should be thrown");
}
catch (DruidException e) {
MatcherAssert.assertThat(e, invalidSqlIs(
assertThat(e, invalidSqlIs(
"Cannot use an ORDER BY clause on a Query of type [INSERT], use CLUSTERED BY instead"
));
}

@@ -1216,7 +1216,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
Assert.fail("Exception should be thrown");
}
catch (DruidException e) {
MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs(
"Invalid granularity['invalid_granularity'] specified after PARTITIONED BY clause."

@@ -1243,7 +1243,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
Assert.fail("Exception should be thrown");
}
catch (DruidException e) {
MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs("Cannot use an ORDER BY clause on a Query of type [INSERT], use CLUSTERED BY instead")
);

@@ -1266,7 +1266,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
)
);

MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs("Operation [INSERT] requires a PARTITIONED BY to be explicitly defined, but none was found.")
);
(File diff suppressed because it is too large.)
@@ -54,10 +54,9 @@ import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rule.ReverseLookupRule;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.Assert;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Test;

import javax.annotation.Nullable;
import java.util.Arrays;

@@ -65,6 +64,8 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.hamcrest.MatcherAssert.assertThat;

public class CalciteLookupFunctionQueryTest extends BaseCalciteQueryTest
{
private static final Map<String, Object> QUERY_CONTEXT =

@@ -1584,7 +1585,7 @@ public class CalciteLookupFunctionQueryTest extends BaseCalciteQueryTest
)
);

MatcherAssert.assertThat(
assertThat(
e,
ThrowableMessageMatcher.hasMessage(CoreMatchers.startsWith("Too many optimize calls[2]"))
);
@@ -50,8 +50,8 @@ import org.apache.druid.segment.virtual.ListFilteredVirtualColumn;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Test;

import java.util.Collections;
import java.util.HashMap;
@@ -75,10 +75,9 @@ import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

@@ -190,12 +189,12 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
)
{
NestedDataModule.registerHandlersAndSerde();
final QueryableIndex index =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -211,7 +210,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest

final QueryableIndex indexMix11 =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -228,7 +227,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest

final QueryableIndex indexMix12 =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -244,7 +243,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest

final QueryableIndex indexMix21 =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -260,7 +259,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest

final QueryableIndex indexMix22 =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -276,7 +275,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest

final QueryableIndex indexArrays =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -295,12 +294,12 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
)
)
.inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
.inputTmpDir(temporaryFolder.newFolder())
.inputTmpDir(newTempFolder())
.buildMMappedIndex();

final QueryableIndex indexAllTypesAuto =
IndexBuilder.create()
.tmpDir(temporaryFolder.newFolder())
.tmpDir(newTempFolder())
.segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
.schema(
new IncrementalIndexSchema.Builder()

@@ -319,7 +318,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
)
)
.inputFormat(TestDataBuilder.DEFAULT_JSON_INPUT_FORMAT)
.inputTmpDir(temporaryFolder.newFolder())
.inputTmpDir(newTempFolder())
.buildMMappedIndex();
@@ -42,7 +42,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.List;
@@ -123,16 +123,15 @@ import org.apache.druid.sql.calcite.run.EngineFeature;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.internal.matchers.ThrowableMessageMatcher;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Arrays;

@@ -143,10 +142,10 @@ import java.util.Map;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertThrows;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

public class CalciteQueryTest extends BaseCalciteQueryTest
{

@@ -352,7 +351,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
)
);

MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs("INSERT operations are not supported by requested SQL engine [native], consider using MSQ.")
);

@@ -371,7 +370,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
)
);

MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs("REPLACE operations are not supported by the requested SQL engine [native]. Consider using MSQ.")
);

@@ -2183,7 +2182,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
}

@Test
@Ignore("Disabled since GROUP BY alias can confuse the validator; see DruidConformance::isGroupByAlias")
@Disabled("Disabled since GROUP BY alias can confuse the validator; see DruidConformance::isGroupByAlias")
public void testGroupByAndOrderByAlias()
{
msqIncompatible();

@@ -5791,7 +5790,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
{
msqIncompatible();

Assume.assumeFalse(NullHandling.sqlCompatible());
Assumptions.assumeFalse(NullHandling.sqlCompatible());

assertQueryIsUnplannable(
// JOIN condition with not-equals (<>).

@@ -6307,7 +6306,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
testBuilder().sql(sql).run();
}
catch (DruidException e) {
MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs("Illegal TIMESTAMP constant [CAST('z2000-01-01 00:00:00'):TIMESTAMP(3) NOT NULL]")
);

@@ -7609,8 +7608,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
public void testQueryWithMoreThanMaxNumericInFilter()
{
assumeFalse(
"skip in sql compatible mode, this plans to an OR filter with equality filter children",
NullHandling.sqlCompatible()
NullHandling.sqlCompatible(),
"skip in sql compatible mode, this plans to an OR filter with equality filter children"
);
msqIncompatible();

@@ -11419,7 +11418,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
Assert.fail("query execution should fail");
}
catch (DruidException e) {
MatcherAssert.assertThat(
assertThat(
e,
invalidSqlIs(
"Invalid number of arguments to function 'TIME_EXTRACT'. Was expecting 2 arguments (line [1], column [8])"

@@ -11613,7 +11612,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
}

@Test
@Ignore("In Calcite 1.17, this test worked, but after upgrading to Calcite 1.21, this query fails with:"
@Disabled("In Calcite 1.17, this test worked, but after upgrading to Calcite 1.21, this query fails with:"
+ " org.apache.calcite.sql.validate.SqlValidatorException: Column 'dim1' is ambiguous")
public void testProjectAfterSort3()
{

@@ -12431,7 +12430,6 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
{
msqIncompatible();
Throwable exception = assertThrows(CannotBuildQueryException.class, () -> {

testQuery(
PLANNER_CONFIG_REQUIRE_TIME_CONDITION,
"SELECT COUNT(*) FROM druid.foo\n"

@@ -13989,14 +13987,16 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
);
}

@Test(expected = DruidException.class)
@Test
public void testStringAggExpressionNonConstantSeparator()
{
testQuery(
"SELECT STRING_AGG(DISTINCT CONCAT(dim1, dim2), CONCAT('|', dim1)) FROM foo",
ImmutableList.of(),
ImmutableList.of()
);
assertThrows(DruidException.class, () -> {
testQuery(
"SELECT STRING_AGG(DISTINCT CONCAT(dim1, dim2), CONCAT('|', dim1)) FROM foo",
ImmutableList.of(),
ImmutableList.of()
);
});
}
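The last hunk above shows the @Test(expected = ...) replacement: the exception is now asserted explicitly with assertThrows, scoped to the exact call that should fail. A minimal sketch using a standard-library call purely for illustration:

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

class ExpectedExceptionSketch
{
  // JUnit 5 drops @Test(expected = ...): the thrown exception is captured and
  // asserted explicitly, which also pins the assertion to one statement.
  @Test
  void throwsOnMalformedInput()
  {
    NumberFormatException e = assertThrows(
        NumberFormatException.class,
        () -> Integer.parseInt("not a number")
    );
    assertTrue(e.getMessage().contains("not a number"));
  }
}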

@Test
(Some files were not shown because too many files have changed in this diff.)