Mirror of https://github.com/apache/druid.git (synced 2025-02-07 02:28:19 +00:00)

commit ef02645fff
Merge pull request #580 from metamx/fix-hadoop

    fix broken standalone hadoop ingestion
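Reading the hunks, the fix touches three spots: HadoopDruidIndexerConfig.fromMap regains backwards compatibility with argument maps that carry a top-level "schema" key; HadoopIngestionSpec passes theTimestampSpec (apparently a defaulted local) rather than the raw timestampSpec into toParseSpec; and CliInternalHadoopIndexer delegates spec parsing to the HadoopDruidIndexerConfig.fromString / fromFile factories instead of building a spec and calling fromSchema.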
@@ -37,7 +37,6 @@ import com.google.inject.Module;
 import com.metamx.common.ISE;
 import com.metamx.common.guava.FunctionalIterable;
 import com.metamx.common.logger.Logger;
-import com.metamx.common.parsers.TimestampParser;
 import io.druid.common.utils.JodaUtils;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.StringInputRowParser;
@@ -111,7 +110,17 @@ public class HadoopDruidIndexerConfig
 
   public static HadoopDruidIndexerConfig fromMap(Map<String, Object> argSpec)
   {
+    //backwards compatibility
+    if (argSpec.containsKey("schema")) {
       return HadoopDruidIndexerConfig.jsonMapper.convertValue(argSpec, HadoopDruidIndexerConfig.class);
+    } else {
+      return new HadoopDruidIndexerConfig(
+          HadoopDruidIndexerConfig.jsonMapper.convertValue(
+              argSpec,
+              HadoopIngestionSpec.class
+          )
+      );
+    }
   }
 
   @SuppressWarnings("unchecked")
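The patched fromMap() dispatches on the presence of a top-level "schema" key: maps in the legacy shape bind directly to HadoopDruidIndexerConfig, while flat maps bind to HadoopIngestionSpec and get wrapped. Below is a minimal, self-contained sketch of the same sniff-a-key dispatch with plain Jackson; TargetA and TargetB are hypothetical stand-ins for the two Druid classes, not Druid's API.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class FromMapSketch
{
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Hypothetical stand-ins for HadoopDruidIndexerConfig / HadoopIngestionSpec.
  public static class TargetA { public Map<String, Object> schema; }
  public static class TargetB { public String dataSource; }

  // Same shape as the patched fromMap(): sniff one key, pick the binding target.
  public static Object fromMap(Map<String, Object> argSpec)
  {
    if (argSpec.containsKey("schema")) {
      // legacy shape: the whole map is the old top-level config
      return MAPPER.convertValue(argSpec, TargetA.class);
    } else {
      // current shape: the map is just the ingestion spec (wrapping elided here)
      return MAPPER.convertValue(argSpec, TargetB.class);
    }
  }

  public static void main(String[] args)
  {
    System.out.println(fromMap(Map.<String, Object>of("dataSource", "x")).getClass().getSimpleName()); // TargetB
    System.out.println(fromMap(Map.<String, Object>of("schema", Map.of())).getClass().getSimpleName()); // TargetA
  }
}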
@@ -138,7 +138,7 @@ public class HadoopIngestionSpec extends IngestionSpec<HadoopIOConfig, HadoopTun
       this.dataSchema = new DataSchema(
           dataSource,
           new StringInputRowParser(
-              dataSpec == null ? null : dataSpec.toParseSpec(timestampSpec, dimensionExclusions),
+              dataSpec == null ? null : dataSpec.toParseSpec(theTimestampSpec, dimensionExclusions),
               null, null, null, null
           ),
           rollupSpec == null
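The one-word change from timestampSpec to theTimestampSpec reads like a near-shadowing fix: a constructor parameter and a defaulted local differ by one word, and the raw (possibly null) parameter was being passed where the defaulted local belonged. That interpretation is an assumption; the hunk only shows the rename. A hedged sketch of the pattern, all names hypothetical:

public class ShadowSketch
{
  static class TimestampSpec
  {
    final String column;
    TimestampSpec(String column) { this.column = column; }
  }

  final TimestampSpec timestampSpec;

  ShadowSketch(TimestampSpec timestampSpec)
  {
    // Default the possibly-null parameter into a local with a similar name.
    final TimestampSpec theTimestampSpec =
        timestampSpec == null ? new TimestampSpec("timestamp") : timestampSpec;

    // Bug: passing the raw parameter `timestampSpec` onward reintroduces the
    // null. Fix, as in the patch: use the defaulted local `theTimestampSpec`.
    this.timestampSpec = theTimestampSpec;
  }

  public static void main(String[] args)
  {
    System.out.println(new ShadowSketch(null).timestampSpec.column); // prints "timestamp"
  }
}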
@@ -67,11 +67,10 @@ public class CliInternalHadoopIndexer implements Runnable
     try {
       HadoopIngestionSpec spec;
       if (argumentSpec.startsWith("{")) {
-        spec = HadoopDruidIndexerConfig.jsonMapper.readValue(argumentSpec, HadoopIngestionSpec.class);
+        return HadoopDruidIndexerConfig.fromString(argumentSpec);
       } else {
-        spec = HadoopDruidIndexerConfig.jsonMapper.readValue(new File(argumentSpec), HadoopIngestionSpec.class);
+        return HadoopDruidIndexerConfig.fromFile(new File(argumentSpec));
       }
-      return HadoopDruidIndexerConfig.fromSchema(spec);
     }
     catch (Exception e) {
       throw Throwables.propagate(e);
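The CLI keeps its dispatch rule (an argument that starts with "{" is inline JSON; anything else is a file path) but now delegates to the fromString/fromFile factories instead of calling jsonMapper directly, which presumably funnels every entry point through the same backwards-compatibility handling as the fromMap change above. A self-contained sketch of that rule with plain Jackson; SpecLike is a hypothetical stand-in for HadoopIngestionSpec:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.File;
import java.io.IOException;

public class ArgDispatchSketch
{
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Hypothetical stand-in for HadoopIngestionSpec.
  public static class SpecLike { public String dataSource; }

  // Inline JSON if the argument starts with '{', otherwise a path to a JSON file.
  public static SpecLike load(String argumentSpec) throws IOException
  {
    if (argumentSpec.startsWith("{")) {
      return MAPPER.readValue(argumentSpec, SpecLike.class);
    } else {
      return MAPPER.readValue(new File(argumentSpec), SpecLike.class);
    }
  }

  public static void main(String[] args) throws IOException
  {
    System.out.println(load("{\"dataSource\":\"wikipedia\"}").dataSource); // wikipedia
  }
}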