mirror of https://github.com/apache/druid.git
Add backwards compatibility for PR #1922
This commit is contained in: parent 929b981710, commit abae47850a
@@ -107,11 +107,13 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
   @JsonCreator
   public RabbitMQFirehoseFactory(
       @JsonProperty("connection") JacksonifiedConnectionFactory connectionFactory,
-      @JsonProperty("config") RabbitMQFirehoseConfig config
+      @JsonProperty("config") RabbitMQFirehoseConfig config,
+      // See https://github.com/druid-io/druid/pull/1922
+      @JsonProperty("connectionFactory") JacksonifiedConnectionFactory connectionFactoryCOMPAT
   ) throws Exception
   {
     this.connectionFactory = connectionFactory == null
-                             ? JacksonifiedConnectionFactory.makeDefaultConnectionFactory()
+                             ? connectionFactoryCOMPAT == null ? JacksonifiedConnectionFactory.makeDefaultConnectionFactory() : connectionFactoryCOMPAT
                              : connectionFactory;
     this.config = config == null ? RabbitMQFirehoseConfig.makeDefaultConfig() : config;
 
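The hunk above is the whole compatibility trick: PR #1922 renamed the JSON field "connectionFactory" to "connection", so the old spelling is re-bound to a shadow constructor argument (connectionFactoryCOMPAT) and the constructor coalesces the two, preferring the new name. A minimal self-contained sketch of the same Jackson pattern, using hypothetical class and property names rather than Druid's:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

// Illustrative only: "endpoint" stands in for the new property name,
// "host" for the legacy one.
public class RenamedFieldCompat
{
  private final String endpoint;

  @JsonCreator
  public RenamedFieldCompat(
      @JsonProperty("endpoint") String endpoint,      // new name
      @JsonProperty("host") String endpointCOMPAT     // legacy name
  )
  {
    // Prefer the new property; fall back to the legacy one; else a default.
    this.endpoint = endpoint != null ? endpoint
                    : endpointCOMPAT != null ? endpointCOMPAT
                    : "localhost";
  }

  public String getEndpoint() { return endpoint; }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    // Old and new payload spellings both bind to the same object.
    System.out.println(mapper.readValue("{\"host\":\"a\"}", RenamedFieldCompat.class).getEndpoint());     // a
    System.out.println(mapper.readValue("{\"endpoint\":\"b\"}", RenamedFieldCompat.class).getEndpoint()); // b
  }
}

Because each spelling has a declared creator parameter, neither payload trips an unrecognized-field error during deserialization.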
@@ -64,7 +64,8 @@ public class RabbitMQFirehoseFactoryTest
 
     RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
         connectionFactory,
-        config
+        config,
+        null
     );
 
     byte[] bytes = mapper.writeValueAsBytes(factory);
@@ -86,7 +87,8 @@ public class RabbitMQFirehoseFactoryTest
 
     RabbitMQFirehoseFactory factory = new RabbitMQFirehoseFactory(
         connectionFactory,
-        config
+        config,
+        null
     );
 
     byte[] bytes = mapper.writeValueAsBytes(factory);
@@ -56,7 +56,8 @@ public class HadoopTuningConfig implements TuningConfig
         false,
         null,
         false,
-        false
+        false,
+        null
     );
   }
 
@@ -88,7 +89,9 @@ public class HadoopTuningConfig implements TuningConfig
       final @JsonProperty("ignoreInvalidRows") boolean ignoreInvalidRows,
       final @JsonProperty("jobProperties") Map<String, String> jobProperties,
       final @JsonProperty("combineText") boolean combineText,
-      final @JsonProperty("useCombiner") Boolean useCombiner
+      final @JsonProperty("useCombiner") Boolean useCombiner,
+      // See https://github.com/druid-io/druid/pull/1922
+      final @JsonProperty("rowFlushBoundary") Integer maxRowsInMemoryCOMPAT
   )
   {
     this.workingPath = workingPath;
@@ -96,7 +99,7 @@ public class HadoopTuningConfig implements TuningConfig
     this.partitionsSpec = partitionsSpec == null ? DEFAULT_PARTITIONS_SPEC : partitionsSpec;
     this.shardSpecs = shardSpecs == null ? DEFAULT_SHARD_SPECS : shardSpecs;
     this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
-    this.rowFlushBoundary = maxRowsInMemory == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemory;
+    this.rowFlushBoundary = maxRowsInMemory == null ? maxRowsInMemoryCOMPAT == null ? DEFAULT_ROW_FLUSH_BOUNDARY : maxRowsInMemoryCOMPAT : maxRowsInMemory;
     this.leaveIntermediate = leaveIntermediate;
     this.cleanupOnFailure = cleanupOnFailure == null ? true : cleanupOnFailure;
     this.overwriteFiles = overwriteFiles;
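The nested ternary above encodes a three-way precedence: an explicit maxRowsInMemory wins, otherwise the legacy rowFlushBoundary value (bound to maxRowsInMemoryCOMPAT), otherwise the default. The same logic written out flat, with the constant's value a placeholder rather than Druid's actual default:

// A straight-line rendering of the patch's nested ternary; names match the
// diff above, but this helper does not exist in the patch itself.
class RowFlushBoundaryResolution
{
  private static final int DEFAULT_ROW_FLUSH_BOUNDARY = 80_000; // placeholder value

  static int resolve(Integer maxRowsInMemory, Integer maxRowsInMemoryCOMPAT)
  {
    if (maxRowsInMemory != null) {
      return maxRowsInMemory;        // current property name wins
    }
    if (maxRowsInMemoryCOMPAT != null) {
      return maxRowsInMemoryCOMPAT;  // legacy "rowFlushBoundary" still honored
    }
    return DEFAULT_ROW_FLUSH_BOUNDARY;
  }
}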
@@ -201,7 +204,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
 
@@ -220,7 +224,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
 
@@ -239,7 +244,8 @@ public class HadoopTuningConfig implements TuningConfig
         ignoreInvalidRows,
         jobProperties,
         combineText,
-        useCombiner
+        useCombiner,
+        null
     );
   }
 }
@@ -380,7 +380,8 @@ public class BatchDeltaIngestionTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );
@@ -160,7 +160,8 @@ public class DetermineHashedPartitionsJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     );
     this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);
@@ -263,7 +263,8 @@ public class DeterminePartitionsJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );
@@ -204,7 +204,8 @@ public class HadoopDruidIndexerConfigTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     );
     HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec);
@@ -52,7 +52,8 @@ public class HadoopTuningConfigTest
         true,
         null,
         true,
-        true
+        true,
+        null
     );
 
     HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class);
@@ -394,7 +394,8 @@ public class IndexGeneratorJobTest
             false,
             ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
             false,
-            useCombiner
+            useCombiner,
+            null
         )
     )
 );
@@ -112,7 +112,8 @@ public class JobHelperTest
                 "THISISMYACCESSKEY"
             ),
             false,
-            false
+            false,
+            null
         )
     )
 );
@@ -200,7 +200,8 @@ public class HadoopConverterJobTest
             false,
             null,
             false,
-            false
+            false,
+            null
         )
     )
 );
@@ -46,7 +46,9 @@ public class MoveTask extends AbstractFixedIntervalTask
       @JsonProperty("dataSource") String dataSource,
       @JsonProperty("interval") Interval interval,
       @JsonProperty("target") Map<String, Object> targetLoadSpec,
-      @JsonProperty("context") Map<String, Object> context
+      @JsonProperty("context") Map<String, Object> context,
+      // See https://github.com/druid-io/druid/pull/1922
+      @JsonProperty("targetLoadSpec") Map<String, Object> targetLoadSpecCOMPAT
   )
   {
     super(
@@ -55,7 +57,7 @@ public class MoveTask extends AbstractFixedIntervalTask
         interval,
         context
     );
-    this.targetLoadSpec = targetLoadSpec;
+    this.targetLoadSpec = targetLoadSpec == null ? targetLoadSpecCOMPAT : targetLoadSpec;
   }
 
   @Override
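MoveTask gets the same treatment for its renamed "target" property, so both spellings of a move-task spec deserialize identically. Illustrative spec fragments (the map values are borrowed from the TaskSerdeTest hunk below; surrounding task fields are elided):

// legacy spelling, accepted via the targetLoadSpecCOMPAT shadow parameter
{"dataSource": "foo", "interval": "2010-01-01/P1D", "targetLoadSpec": {"bucket": "hey", "baseKey": "what"}}

// current spelling
{"dataSource": "foo", "interval": "2010-01-01/P1D", "target": {"bucket": "hey", "baseKey": "what"}}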
@@ -501,6 +501,7 @@ public class TaskSerdeTest
         "foo",
         new Interval("2010-01-01/P1D"),
         ImmutableMap.<String, Object>of("bucket", "hey", "baseKey", "what"),
+        null,
         null
     );
 