Fix Hadoop CLI jobs

* Change "schema" --> "spec" for cli hadoop to keep up with internal hadoop
* Added check for HadoopDruidIndexerConfig deserialization from Map to see if it is trying to get a HadoopDruidIndexerConfig or a HadoopIngestionSpec
Author: Charles Allen
Date:   2014-12-02 11:01:12 -08:00
parent 59542c41f8
commit 7cd689be75
2 changed files with 21 additions and 9 deletions
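
The second bullet is the behavioral change worth spelling out: HadoopDruidIndexerConfig.fromMap() now looks for a top-level "spec" key to decide whether the Map it was handed is a serialized HadoopDruidIndexerConfig or a bare HadoopIngestionSpec. Below is a minimal standalone toy of that dispatch; only fromMap() and the "spec" key come from this commit, while the class name and the payload field names here are illustrative assumptions.

import java.util.HashMap;
import java.util.Map;

// Toy dispatch (not Druid code) mirroring the check added to fromMap():
// a map carrying a top-level "spec" key already describes the full config,
// anything else is treated as a bare ingestion spec and wrapped.
public class SpecDispatchSketch
{
  static String describe(Map<String, Object> argSpec)
  {
    if (argSpec.containsKey("spec")) {
      return "convert the whole map directly into a HadoopDruidIndexerConfig";
    }
    return "treat the map as a HadoopIngestionSpec and wrap it in a new HadoopDruidIndexerConfig";
  }

  public static void main(String[] args)
  {
    Map<String, Object> wrapped = new HashMap<String, Object>();
    wrapped.put("spec", new HashMap<String, Object>());      // CLI hadoop payload: spec nested under "spec"

    Map<String, Object> bare = new HashMap<String, Object>();
    bare.put("dataSchema", new HashMap<String, Object>());   // bare ingestion-spec payload (field name assumed)

    System.out.println(describe(wrapped));
    System.out.println(describe(bare));
  }
}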

HadoopDruidIndexerConfig.java

@@ -114,6 +114,15 @@ public class HadoopDruidIndexerConfig
   public static HadoopDruidIndexerConfig fromMap(Map<String, Object> argSpec)
   {
+    // Eventually PathSpec needs to get rid of its Hadoop dependency, then maybe this can be ingested directly without
+    // the Map<> intermediary
+    if(argSpec.containsKey("spec")){
+      return HadoopDruidIndexerConfig.jsonMapper.convertValue(
+          argSpec,
+          HadoopDruidIndexerConfig.class
+      );
+    }
     return new HadoopDruidIndexerConfig(
         HadoopDruidIndexerConfig.jsonMapper.convertValue(
             argSpec,
@@ -142,6 +151,7 @@ public class HadoopDruidIndexerConfig
   @SuppressWarnings("unchecked")
   public static HadoopDruidIndexerConfig fromString(String str)
   {
+    // This is a map to try and prevent dependency screwbally-ness
     try {
       return fromMap(
           (Map<String, Object>) HadoopDruidIndexerConfig.jsonMapper.readValue(
@@ -171,7 +181,7 @@ public class HadoopDruidIndexerConfig
   @JsonCreator
   public HadoopDruidIndexerConfig(
-      final @JsonProperty("schema") HadoopIngestionSpec schema
+      final @JsonProperty("spec") HadoopIngestionSpec schema
   )
   {
     this.schema = schema;
@@ -202,7 +212,7 @@ public class HadoopDruidIndexerConfig
     this.rollupGran = schema.getDataSchema().getGranularitySpec().getQueryGranularity();
   }
 
-  @JsonProperty
+  @JsonProperty(value="spec")
   public HadoopIngestionSpec getSchema()
   {
     return schema;

HadoopIOConfig.java

@@ -19,6 +19,7 @@
 package io.druid.indexer;
 
+import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import io.druid.indexer.updater.MetadataStorageUpdaterJobSpec;
@@ -35,6 +36,7 @@ public class HadoopIOConfig implements IOConfig
   private final MetadataStorageUpdaterJobSpec metadataUpdateSpec;
   private final String segmentOutputPath;
 
+  @JsonCreator
   public HadoopIOConfig(
       final @JsonProperty("inputSpec") Map<String, Object> pathSpec,
       final @JsonProperty("metadataUpdateSpec") MetadataStorageUpdaterJobSpec metadataUpdateSpec,