mirror of https://github.com/apache/druid.git
remove unused ObjectMapper from DatasourcePathSpec (#7754)

commit 2b7bb064b5 (parent 3a77a3e112)
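
The change is mechanical at call sites: every new DatasourcePathSpec(...) simply drops its leading ObjectMapper argument, and the JSON shape of the spec is unchanged because the mapper was injected rather than read from the payload. A minimal sketch of a call site after this commit (the data source name, interval, and class/variable names are illustrative, not taken from the commit):

import org.apache.druid.indexer.hadoop.DatasourceIngestionSpec;
import org.apache.druid.indexer.path.DatasourcePathSpec;
import org.apache.druid.java.util.common.Intervals;

public class DatasourcePathSpecUsageSketch
{
  public static DatasourcePathSpec example()
  {
    // DatasourceIngestionSpec arguments follow the order used in the test hunks
    // below: dataSource, interval, then optional settings left as null/false.
    DatasourceIngestionSpec ingestionSpec = new DatasourceIngestionSpec(
        "wikipedia",                // illustrative data source name
        Intervals.of("2000/3000"),  // illustrative interval
        null, null, null, null, null, false, null
    );

    // Before this commit the constructor took an @JacksonInject ObjectMapper as
    // its first argument; after it, the argument list starts with the segments.
    return new DatasourcePathSpec(
        null,           // segments (null here, as in the tests below)
        ingestionSpec,  // ingestionSpec
        null,           // maxSplitSize (null keeps the default)
        false           // useNewAggs
    );
  }
}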
DatasourcePathSpec.java

@@ -19,10 +19,8 @@
 
 package org.apache.druid.indexer.path;
 
-import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
@@ -51,7 +49,6 @@ public class DatasourcePathSpec implements PathSpec
 
   public static final String TYPE = "dataSource";
 
-  private final ObjectMapper mapper;
   private final DatasourceIngestionSpec ingestionSpec;
   private final long maxSplitSize;
   private final List<WindowedDataSegment> segments;
@@ -67,14 +64,12 @@ public class DatasourcePathSpec implements PathSpec
 
   @JsonCreator
   public DatasourcePathSpec(
-      @JacksonInject ObjectMapper mapper,
       @JsonProperty("segments") List<WindowedDataSegment> segments,
       @JsonProperty("ingestionSpec") DatasourceIngestionSpec spec,
       @JsonProperty("maxSplitSize") Long maxSplitSize,
       @JsonProperty(USE_NEW_AGGS_KEY) boolean useNewAggs
   )
   {
-    this.mapper = Preconditions.checkNotNull(mapper, "null mapper");
     this.segments = segments;
     this.ingestionSpec = Preconditions.checkNotNull(spec, "null ingestionSpec");
 
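
For context on why this removal is safe: the deleted parameter was supplied through Jackson's injectable values, not through the JSON document, so serialized path specs look exactly the same before and after. A toy, standalone sketch of that mechanism (plain Jackson, not Druid code; the Widget class and its fields are made up):

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JacksonInjectSketch
{
  static class Widget
  {
    final String name;
    final ObjectMapper mapper;  // comes from the mapper's injectable values

    @JsonCreator
    public Widget(
        @JacksonInject ObjectMapper mapper,  // injected, never present in the JSON
        @JsonProperty("name") String name    // read from the JSON as usual
    )
    {
      this.mapper = mapper;
      this.name = name;
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // Register the value that @JacksonInject ObjectMapper resolves to.
    mapper.setInjectableValues(
        new InjectableValues.Std().addValue(ObjectMapper.class, mapper)
    );
    // The JSON contains only "name"; the mapper field is filled in by injection,
    // which is why dropping an unused injected parameter never changes the JSON.
    Widget w = mapper.readValue("{\"name\":\"x\"}", Widget.class);
    System.out.println(w.name + ", mapper injected: " + (w.mapper != null));
  }
}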
HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java

@@ -111,7 +111,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListIfDatasourcePathSpecIsUsedWithJustDatasourcePathSpec() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false, null),
         null,
@@ -131,7 +130,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListIfDatasourcePathSpecWithMatchingUserSegments() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -161,7 +159,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   public void testUpdateSegmentListThrowsExceptionWithUserSegmentsMismatch() throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -188,7 +185,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
       throws Exception
   {
     PathSpec pathSpec = new DatasourcePathSpec(
-        jsonMapper,
         null,
         new DatasourceIngestionSpec(
             testDatasource,
@@ -221,7 +217,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
         ImmutableList.of(
             new StaticPathSpec("/xyz", null),
             new DatasourcePathSpec(
-                jsonMapper,
                 null,
                 new DatasourceIngestionSpec(
                     testDatasource,
@@ -238,7 +233,6 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
                     false
                 ),
                 new DatasourcePathSpec(
-                    jsonMapper,
                     null,
                     new DatasourceIngestionSpec(
                         testDatasource2,
DatasourcePathSpecTest.java

@@ -41,13 +41,11 @@ import org.apache.druid.indexer.hadoop.DatasourceIngestionSpec;
 import org.apache.druid.indexer.hadoop.DatasourceInputFormat;
 import org.apache.druid.indexer.hadoop.WindowedDataSegment;
 import org.apache.druid.initialization.Initialization;
-import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
-import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.indexing.DataSchema;
 import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
 import org.apache.druid.server.DruidNode;
@@ -182,7 +180,6 @@ public class DatasourcePathSpecTest
     ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
 
     DatasourcePathSpec expected = new DatasourcePathSpec(
-        jsonMapper,
         null,
         ingestionSpec1,
         Long.valueOf(10),
@@ -192,7 +189,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         null,
         ingestionSpec1,
         null,
@@ -202,7 +198,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         segments1,
         ingestionSpec1,
         null,
@@ -212,7 +207,6 @@ public class DatasourcePathSpecTest
     Assert.assertEquals(expected, actual);
 
     expected = new DatasourcePathSpec(
-        jsonMapper,
         segments1,
         ingestionSpec1,
         null,
@@ -227,10 +221,7 @@ public class DatasourcePathSpecTest
   {
     HadoopDruidIndexerConfig hadoopIndexerConfig = makeHadoopDruidIndexerConfig();
 
-    ObjectMapper mapper = TestHelper.makeJsonMapper();
-
     DatasourcePathSpec pathSpec1 = new DatasourcePathSpec(
-        mapper,
         segments1,
         ingestionSpec1,
         null,
@@ -238,7 +229,6 @@ public class DatasourcePathSpecTest
     );
 
     DatasourcePathSpec pathSpec2 = new DatasourcePathSpec(
-        mapper,
         segments2,
         ingestionSpec2,
         null,
@@ -281,10 +271,7 @@ public class DatasourcePathSpecTest
   {
     HadoopDruidIndexerConfig hadoopIndexerConfig = makeHadoopDruidIndexerConfig();
 
-    ObjectMapper mapper = new DefaultObjectMapper();
-
     DatasourcePathSpec pathSpec = new DatasourcePathSpec(
-        mapper,
         null,
         ingestionSpec1,
         null,
@@ -306,7 +293,6 @@ public class DatasourcePathSpecTest
 
     //now with ignoreWhenNoSegments flag set
     pathSpec = new DatasourcePathSpec(
-        mapper,
         null,
         ingestionSpec1.withIgnoreWhenNoSegments(true),
         null,