fix nullhandling exceptions related to test ordering (#9570)

* fix nullhandling exceptions related to test ordering

Tests might get executed in a different order depending on the Maven
version and the test environment. This can lead to "NullHandling module
not initialized" errors in tests that do not initialize null handling
explicitly.

* use InitializedNullHandlingTest
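
The pattern used throughout this change is a small druid-core test base
class whose static initializer configures the NullHandling module before
any test in a subclass runs, making the tests independent of class
execution order. A minimal sketch of that idea follows; the
initializeForTests() helper name is an assumption here, not something
this diff shows:

import org.apache.druid.common.config.NullHandling;

// Base class for tests that exercise null-handling-sensitive code.
// The static initializer runs once, when the subclass is loaded, so
// NullHandling is configured before any @Test method executes, no
// matter what order Maven or JUnit schedules the test classes in.
public class InitializedNullHandlingTest
{
  static {
    // Assumed helper; configures NullHandling with test defaults.
    NullHandling.initializeForTests();
  }
}

Each affected test opts in simply by extending this class, as the hunks
below show; the pom.xml change pulls in the druid-core test jar (the
"tests" classifier) that provides it.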
Xavier Léauté 2020-03-27 09:46:31 -07:00 committed by GitHub
parent 2c49f6d89a
commit b4ad3d0d88
5 changed files with 17 additions and 4 deletions

pom.xml

@@ -422,6 +422,13 @@
       <version>${project.parent.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.druid</groupId>
+      <artifactId>druid-core</artifactId>
+      <version>${project.parent.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>

HdfsInputSourceTest.java

@@ -34,6 +34,7 @@ import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.TimestampSpec;
 import org.apache.druid.java.util.common.parsers.CloseableIterator;
 import org.apache.druid.storage.hdfs.HdfsStorageDruidModule;
+import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -64,7 +65,7 @@ import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
 @RunWith(Enclosed.class)
-public class HdfsInputSourceTest
+public class HdfsInputSourceTest extends InitializedNullHandlingTest
 {
   private static final String PATH = "/foo/bar";
   private static final Configuration CONFIGURATION = new Configuration();

RecordSupplierInputSourceTest.java

@@ -35,6 +35,7 @@ import org.apache.druid.indexing.seekablestream.common.StreamPartition;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.parsers.CloseableIterator;
+import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -54,8 +55,9 @@ import java.util.concurrent.ThreadLocalRandom;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
-public class RecordSupplierInputSourceTest
+public class RecordSupplierInputSourceTest extends InitializedNullHandlingTest
 {
   private static final int NUM_COLS = 16;
   private static final int NUM_ROWS = 128;
   private static final String TIMESTAMP_STRING = "2019-01-01";

SchemalessTestFullTest.java

@@ -60,6 +60,7 @@ import org.apache.druid.query.topn.TopNResultValue;
 import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
 import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory;
+import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -73,7 +74,7 @@ import java.util.Map;
 /**
  */
 @RunWith(Parameterized.class)
-public class SchemalessTestFullTest
+public class SchemalessTestFullTest extends InitializedNullHandlingTest
 {
   @Parameterized.Parameters
   public static Collection<?> constructorFeeder()

SchemalessTestSimpleTest.java

@@ -57,6 +57,7 @@ import org.apache.druid.query.topn.TopNQueryBuilder;
 import org.apache.druid.query.topn.TopNResultValue;
 import org.apache.druid.segment.incremental.IncrementalIndex;
 import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
+import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -72,8 +73,9 @@ import java.util.List;
 /**
  */
 @RunWith(Parameterized.class)
-public class SchemalessTestSimpleTest
+public class SchemalessTestSimpleTest extends InitializedNullHandlingTest
 {
   @Parameterized.Parameters
   public static Collection<?> constructorFeeder()
   {