remove unused ObjectMapper from DatasourcePathSpec (#7754)

Himanshu authored 2019-05-24 23:15:40 -07:00, committed by Fangjin Yang
parent 3a77a3e112
commit 2b7bb064b5
3 changed files with 0 additions and 25 deletions
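Note on the mechanism (not part of the diff): the deleted constructor parameter was supplied via Jackson's @JacksonInject, i.e. from InjectableValues registered on the ObjectMapper rather than from the JSON spec itself, which is why dropping it does not change the serialized form. Below is a minimal, self-contained sketch of that mechanism, assuming Jackson 2.x; the class and field names are hypothetical, not from this commit.

// Hypothetical sketch: how a @JacksonInject constructor argument (like the
// mapper removed in this commit) is supplied during deserialization.
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonInjectSketch
{
  static class SpecWithInjectedMapper
  {
    final ObjectMapper mapper; // injected by Jackson, never read from JSON
    final String dataSource;   // read from the JSON payload

    @JsonCreator
    SpecWithInjectedMapper(
        @JacksonInject ObjectMapper mapper,
        @JsonProperty("dataSource") String dataSource
    )
    {
      this.mapper = mapper;
      this.dataSource = dataSource;
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper objectMapper = new ObjectMapper();
    // The injectable value must be registered up front; once the
    // @JacksonInject parameter is deleted, this registration becomes
    // unnecessary for this type.
    objectMapper.setInjectableValues(
        new InjectableValues.Std().addValue(ObjectMapper.class, objectMapper)
    );
    SpecWithInjectedMapper spec =
        objectMapper.readValue("{\"dataSource\":\"wikipedia\"}", SpecWithInjectedMapper.class);
    System.out.println(spec.dataSource);
  }
}

Since DatasourcePathSpec never read its injected mapper, removing the parameter and its field, as the hunks below do, is behavior-preserving.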

DatasourcePathSpec.java

@@ -19,10 +19,8 @@
 
 package org.apache.druid.indexer.path;
 
-import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
@@ -51,7 +49,6 @@ public class DatasourcePathSpec implements PathSpec
 
   public static final String TYPE = "dataSource";
 
-  private final ObjectMapper mapper;
   private final DatasourceIngestionSpec ingestionSpec;
   private final long maxSplitSize;
   private final List<WindowedDataSegment> segments;
@@ -67,14 +64,12 @@ public class DatasourcePathSpec implements PathSpec
 
   @JsonCreator
   public DatasourcePathSpec(
-      @JacksonInject ObjectMapper mapper,
       @JsonProperty("segments") List<WindowedDataSegment> segments,
       @JsonProperty("ingestionSpec") DatasourceIngestionSpec spec,
       @JsonProperty("maxSplitSize") Long maxSplitSize,
       @JsonProperty(USE_NEW_AGGS_KEY) boolean useNewAggs
   )
   {
-    this.mapper = Preconditions.checkNotNull(mapper, "null mapper");
     this.segments = segments;
     this.ingestionSpec = Preconditions.checkNotNull(spec, "null ingestionSpec");
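With the hunk above applied, callers construct the spec without a mapper argument, as the test updates below also show. A caller-side sketch; the argument values are placeholders, not from this commit:

// Placeholder arguments; only the four-argument shape reflects this commit.
DatasourcePathSpec pathSpec = new DatasourcePathSpec(
    null,           // segments: the tests below pass null here
    ingestionSpec,  // DatasourceIngestionSpec; must be non-null (Preconditions check)
    null,           // maxSplitSize: nullable Long
    false           // useNewAggs
);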

HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java

@@ -111,7 +111,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListIfDatasourcePathSpecIsUsedWithJustDatasourcePathSpec() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false, null),
         null,
@@ -131,7 +130,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListIfDatasourcePathSpecWithMatchingUserSegments() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -161,7 +159,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListThrowsExceptionWithUserSegmentsMismatch() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -188,7 +185,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
       throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -221,7 +217,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
         ImmutableList.of(
             new StaticPathSpec("/xyz", null),
             new DatasourcePathSpec(
-                jsonMapper,
                 null,
                 new DatasourceIngestionSpec(
                     testDatasource,
@@ -238,7 +233,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
                 false
             ),
             new DatasourcePathSpec(
-                jsonMapper,
                 null,
                 new DatasourceIngestionSpec(
                     testDatasource2,

DatasourcePathSpecTest.java

@@ -41,13 +41,11 @@ import org.apache.druid.indexer.hadoop.DatasourceIngestionSpec;
 import org.apache.druid.indexer.hadoop.DatasourceInputFormat;
 import org.apache.druid.indexer.hadoop.WindowedDataSegment;
 import org.apache.druid.initialization.Initialization;
-import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
-import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.indexing.DataSchema;
 import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
 import org.apache.druid.server.DruidNode;
@@ -182,7 +180,6 @@ public class DatasourcePathSpecTest
     ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
 
     DatasourcePathSpec expected = new DatasourcePathSpec(
-        jsonMapper,
         null,
         ingestionSpec1,
         Long.valueOf(10),
@@ -192,7 +189,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         null,
         ingestionSpec1,
         null,
@@ -202,7 +198,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         segments1,
         ingestionSpec1,
         null,
@@ -212,7 +207,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         segments1,
         ingestionSpec1,
         null,
@@ -227,10 +221,7 @@ public class DatasourcePathSpecTest
   {
     HadoopDruidIndexerConfig hadoopIndexerConfig = makeHadoopDruidIndexerConfig();
 
-    ObjectMapper mapper = TestHelper.makeJsonMapper();
-
     DatasourcePathSpec pathSpec1 = new DatasourcePathSpec(
-        mapper,
         segments1,
         ingestionSpec1,
         null,
@@ -238,7 +229,6 @@ public class DatasourcePathSpecTest
     );
 
     DatasourcePathSpec pathSpec2 = new DatasourcePathSpec(
-        mapper,
         segments2,
         ingestionSpec2,
         null,
@@ -281,10 +271,7 @@ public class DatasourcePathSpecTest
   {
     HadoopDruidIndexerConfig hadoopIndexerConfig = makeHadoopDruidIndexerConfig();
 
-    ObjectMapper mapper = new DefaultObjectMapper();
-
     DatasourcePathSpec pathSpec = new DatasourcePathSpec(
-        mapper,
         null,
         ingestionSpec1,
         null,
@@ -306,7 +293,6 @@ public class DatasourcePathSpecTest
 
     //now with ignoreWhenNoSegments flag set
     pathSpec = new DatasourcePathSpec(
-        mapper,
         null,
         ingestionSpec1.withIgnoreWhenNoSegments(true),
         null,