Fix null handling exception (#10095)

Co-authored-by: Atul Mohan <atulmohan@yahoo-inc.com>
This commit is contained in:
Atul Mohan 2020-06-29 22:55:38 -05:00 committed by GitHub
parent 33a37d85d7
commit 0841c89df6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 16 additions and 3 deletions

View File

@ -45,6 +45,7 @@ import org.apache.druid.query.timeseries.TimeseriesQuery;
import org.apache.druid.query.timeseries.TimeseriesResultValue; import org.apache.druid.query.timeseries.TimeseriesResultValue;
import org.apache.druid.segment.indexing.RealtimeTuningConfig; import org.apache.druid.segment.indexing.RealtimeTuningConfig;
import org.apache.druid.segment.realtime.plumber.Committers; import org.apache.druid.segment.realtime.plumber.Committers;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec; import org.apache.druid.timeline.partition.LinearShardSpec;
import org.junit.Assert; import org.junit.Assert;
@ -58,7 +59,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
public class AppenderatorTest public class AppenderatorTest extends InitializedNullHandlingTest
{ {
private static final List<SegmentIdWithShardSpec> IDENTIFIERS = ImmutableList.of( private static final List<SegmentIdWithShardSpec> IDENTIFIERS = ImmutableList.of(
si("2000/2001", "A", 0), si("2000/2001", "A", 0),

View File

@ -22,6 +22,7 @@ package org.apache.druid.segment.realtime.appenderator;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.indexing.overlord.SegmentPublishResult; import org.apache.druid.indexing.overlord.SegmentPublishResult;
@ -75,6 +76,10 @@ public class BatchAppenderatorDriverTest extends EasyMockSupport
private BatchAppenderatorDriver driver; private BatchAppenderatorDriver driver;
private DataSegmentKiller dataSegmentKiller; private DataSegmentKiller dataSegmentKiller;
static {
NullHandling.initializeForTests();
}
@Before @Before
public void setup() public void setup()
{ {

View File

@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.Committer; import org.apache.druid.data.input.Committer;
import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.MapBasedInputRow;
@ -99,6 +100,10 @@ public class StreamAppenderatorDriverTest extends EasyMockSupport
private StreamAppenderatorDriver driver; private StreamAppenderatorDriver driver;
private DataSegmentKiller dataSegmentKiller; private DataSegmentKiller dataSegmentKiller;
static {
NullHandling.initializeForTests();
}
@Before @Before
public void setUp() public void setUp()
{ {

View File

@ -60,6 +60,7 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFacto
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory; import org.apache.druid.segment.writeout.TmpFileSegmentWriteOutMediumFactory;
import org.apache.druid.server.coordination.DataSegmentAnnouncer; import org.apache.druid.server.coordination.DataSegmentAnnouncer;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.easymock.EasyMock; import org.easymock.EasyMock;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
@ -83,7 +84,7 @@ import java.util.concurrent.TimeUnit;
* *
*/ */
@RunWith(Parameterized.class) @RunWith(Parameterized.class)
public class RealtimePlumberSchoolTest public class RealtimePlumberSchoolTest extends InitializedNullHandlingTest
{ {
@Parameterized.Parameters(name = "rejectionPolicy = {0}, segmentWriteOutMediumFactory = {1}") @Parameterized.Parameters(name = "rejectionPolicy = {0}, segmentWriteOutMediumFactory = {1}")
public static Collection<?> constructorFeeder() public static Collection<?> constructorFeeder()

View File

@ -37,6 +37,7 @@ import org.apache.druid.segment.indexing.RealtimeTuningConfig;
import org.apache.druid.segment.indexing.TuningConfigs; import org.apache.druid.segment.indexing.TuningConfigs;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.realtime.FireHydrant; import org.apache.druid.segment.realtime.FireHydrant;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.joda.time.Period; import org.joda.time.Period;
@ -48,7 +49,7 @@ import java.util.List;
/** /**
*/ */
public class SinkTest public class SinkTest extends InitializedNullHandlingTest
{ {
@Test @Test
public void testSwap() throws Exception public void testSwap() throws Exception