Add backwards compatibility for PR #1922

Charles Allen 2015-11-11 10:27:00 -08:00
parent 929b981710
commit abae47850a
13 changed files with 41 additions and 20 deletions

View File

@@ -107,11 +107,13 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
   @JsonCreator
   public RabbitMQFirehoseFactory(
       @JsonProperty("connection") JacksonifiedConnectionFactory connectionFactory,
-      @JsonProperty("config") RabbitMQFirehoseConfig config
+      @JsonProperty("config") RabbitMQFirehoseConfig config,
+      // See https://github.com/druid-io/druid/pull/1922
+      @JsonProperty("connectionFactory") JacksonifiedConnectionFactory connectionFactoryCOMPAT
   ) throws Exception
   {
     this.connectionFactory = connectionFactory == null
-                             ? JacksonifiedConnectionFactory.makeDefaultConnectionFactory()
+                             ? connectionFactoryCOMPAT == null ? JacksonifiedConnectionFactory.makeDefaultConnectionFactory() : connectionFactoryCOMPAT
                              : connectionFactory;
     this.config = config == null ? RabbitMQFirehoseConfig.makeDefaultConfig() : config;
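The hunk above lets specs written against the pre-#1922 property name "connectionFactory" keep deserializing, while new specs use "connection"; the new name takes precedence. A minimal, self-contained sketch of the same Jackson technique (the FactoryConfig class, the String-typed field, and the "default-connection" fallback are stand-ins for illustration, not Druid code):

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConnectionCompatDemo
{
  static class FactoryConfig
  {
    final String connection;

    @JsonCreator
    FactoryConfig(
        @JsonProperty("connection") String connection,
        // Legacy property name kept for specs written before PR #1922.
        @JsonProperty("connectionFactory") String connectionCompat
    )
    {
      // Prefer the new name, fall back to the legacy one, then to a default.
      this.connection = connection != null ? connection
                        : connectionCompat != null ? connectionCompat
                        : "default-connection";
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // A spec written against the old field name still deserializes.
    FactoryConfig legacy = mapper.readValue(
        "{\"connectionFactory\": \"amqp://legacy-host\"}",
        FactoryConfig.class
    );
    System.out.println(legacy.connection); // amqp://legacy-host
  }
}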

View File

@@ -64,7 +64,8 @@ public class RabbitMQFirehoseFactoryTest
     RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
         connectionFactory,
-        config
+        config,
+        null
     );
     byte[] bytes = mapper.writeValueAsBytes(factory);
@@ -86,7 +87,8 @@ public class RabbitMQFirehoseFactoryTest
     RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
         connectionFactory,
-        config
+        config,
+        null
     );
     byte[] bytes = mapper.writeValueAsBytes(factory);

View File

@@ -56,7 +56,8 @@ public class HadoopTuningConfig implements TuningConfig
         false,
         null,
         false,
-        false
+        false,
+        null
     );
   }
@@ -88,7 +89,9 @@ public class HadoopTuningConfig implements TuningConfig
       final @JsonProperty("ignoreInvalidRows") boolean ignoreInvalidRows,
       final @JsonProperty("jobProperties") Map<String, String> jobProperties,
       final @JsonProperty("combineText") boolean combineText,
-      final @JsonProperty("useCombiner") Boolean useCombiner
+      final @JsonProperty("useCombiner") Boolean useCombiner,
+      // See https://github.com/druid-io/druid/pull/1922
+      final @JsonProperty("rowFlushBoundary") Integer maxRowsInMemoryCOMPAT
   )
   {
     this.workingPath = workingPath;
@@ -96,7 +99,7 @@ public class HadoopTuningConfig implements TuningConfig
     this.partitionsSpec = partitionsSpec == null ? DEFAULT_PARTITIONS_SPEC : partitionsSpec;
     this.shardSpecs = shardSpecs == null ? DEFAULT_SHARD_SPECS : shardSpecs;
     this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
-    this.rowFlushBoundary = maxRowsInMemory == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemory;
+    this.rowFlushBoundary = maxRowsInMemory == null ? maxRowsInMemoryCOMPAT == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemoryCOMPAT : maxRowsInMemory;
     this.leaveIntermediate = leaveIntermediate;
     this.cleanupOnFailure = cleanupOnFailure == null ? true : cleanupOnFailure;
     this.overwriteFiles = overwriteFiles;
@@ -201,7 +204,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
@@ -220,7 +224,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
@@ -239,7 +244,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
 }
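The nested ternary in the constructor makes the precedence explicit: the current property wins, the legacy "rowFlushBoundary" value is consulted only when the current one is absent, and the default applies when neither is set. A small stand-in sketch of that precedence (the Tuning class and the 80,000 default are hypothetical, and it assumes the canonical JSON name is maxRowsInMemory, which this hunk does not show):

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RowFlushCompatDemo
{
  static final int DEFAULT_ROW_FLUSH_BOUNDARY = 80_000; // hypothetical default

  static class Tuning
  {
    final int rowFlushBoundary;

    @JsonCreator
    Tuning(
        @JsonProperty("maxRowsInMemory") Integer maxRowsInMemory,
        // Legacy name; see https://github.com/druid-io/druid/pull/1922
        @JsonProperty("rowFlushBoundary") Integer maxRowsInMemoryCOMPAT
    )
    {
      // New name wins, then the legacy name, then the default.
      this.rowFlushBoundary = maxRowsInMemory == null
                              ? (maxRowsInMemoryCOMPAT == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemoryCOMPAT)
                              : maxRowsInMemory;
    }
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // Legacy name alone is honored.
    System.out.println(mapper.readValue("{\"rowFlushBoundary\": 50000}", Tuning.class).rowFlushBoundary); // 50000
    // When both appear, the new name takes precedence.
    System.out.println(mapper.readValue(
        "{\"rowFlushBoundary\": 50000, \"maxRowsInMemory\": 75000}", Tuning.class).rowFlushBoundary);     // 75000
    // Neither set: fall back to the default.
    System.out.println(mapper.readValue("{}", Tuning.class).rowFlushBoundary);                            // 80000
  }
}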

View File

@@ -380,7 +380,8 @@ public class BatchDeltaIngestionTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );

View File

@@ -160,7 +160,8 @@ public class DetermineHashedPartitionsJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     );
     this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);

View File

@@ -263,7 +263,8 @@ public class DeterminePartitionsJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );

View File

@@ -204,7 +204,8 @@ public class HadoopDruidIndexerConfigTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     );
     HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec);

View File

@@ -52,7 +52,8 @@ public class HadoopTuningConfigTest
         true,
         null,
         true,
-        true
+        true,
+        null
     );
     HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class);

View File

@@ -394,7 +394,8 @@ public class IndexGeneratorJobTest
             false,
             ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
             false,
-            useCombiner
+            useCombiner,
+            null
         )
     )
 );

View File

@@ -112,7 +112,8 @@ public class JobHelperTest
                 "THISISMYACCESSKEY"
             ),
             false,
-            false
+            false,
+            null
         )
     )
 );

View File

@@ -200,7 +200,8 @@ public class HadoopConverterJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );

View File

@@ -46,7 +46,9 @@ public class MoveTask extends AbstractFixedIntervalTask
       @JsonProperty("dataSource") String dataSource,
       @JsonProperty("interval") Interval interval,
       @JsonProperty("target") Map<String, Object> targetLoadSpec,
-      @JsonProperty("context") Map<String, Object> context
+      @JsonProperty("context") Map<String, Object> context,
+      // See https://github.com/druid-io/druid/pull/1922
+      @JsonProperty("targetLoadSpec") Map<String, Object> targetLoadSpecCOMPAT
   )
   {
     super(
@@ -55,7 +57,7 @@ public class MoveTask extends AbstractFixedIntervalTask
         interval,
         context
     );
-    this.targetLoadSpec = targetLoadSpec;
+    this.targetLoadSpec = targetLoadSpec == null ? targetLoadSpecCOMPAT : targetLoadSpec;
   }
   @Override

View File

@@ -501,6 +501,7 @@ public class TaskSerdeTest
         "foo",
         new Interval("2010-01-01/P1D"),
         ImmutableMap.<String, Object>of("bucket", "hey", "baseKey", "what"),
+        null,
         null
     );