mirror of https://github.com/apache/druid.git
Merge pull request #1454 from optimizely/hao/hadoop-fixes
A couple of HDFS-related fixes
commit 8795792dba
@@ -81,6 +81,10 @@ public class HdfsStorageDruidModule implements DruidModule
     Binders.dataSegmentKillerBinder(binder).addBinding(SCHEME).to(HdfsDataSegmentKiller.class).in(LazySingleton.class);
 
     final Configuration conf = new Configuration();
+
+    // Set explicit CL. Otherwise it'll try to use thread context CL, which may not have all of our dependencies.
+    conf.setClassLoader(getClass().getClassLoader());
+
     if (props != null) {
       for (String propName : System.getProperties().stringPropertyNames()) {
         if (propName.startsWith("hadoop.")) {
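For context, the surrounding code builds a Hadoop Configuration, pins its classloader, and copies hadoop.*-prefixed JVM system properties into it. Below is a minimal, self-contained sketch of that pattern; the class name and the prefix-stripping convention are assumptions for illustration, not lifted verbatim from HdfsStorageDruidModule.

import org.apache.hadoop.conf.Configuration;

public class HadoopConfSketch
{
  public static Configuration buildConf()
  {
    final Configuration conf = new Configuration();

    // Pin the classloader to the one that loaded this class so Hadoop resolves
    // classes and resources against the same dependencies, rather than whatever
    // thread context classloader happens to be active.
    conf.setClassLoader(HadoopConfSketch.class.getClassLoader());

    // Copy "hadoop."-prefixed system properties into the Configuration.
    // Stripping the prefix is an assumed convention for this sketch.
    for (String propName : System.getProperties().stringPropertyNames()) {
      if (propName.startsWith("hadoop.")) {
        conf.set(propName.substring("hadoop.".length()), System.getProperty(propName));
      }
    }
    return conf;
  }
}

Setting the classloader explicitly matters because Configuration otherwise falls back to the thread context classloader, which in an extension-loading setup may not see the HDFS extension's own dependencies, as the added in-code comment notes.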
@@ -518,7 +518,7 @@ public class JobHelper
   public static Path prependFSIfNullScheme(FileSystem fs, Path path)
   {
     if (path.toUri().getScheme() == null) {
-      path = new Path(fs.getUri().toString(), String.format("./%s", path));
+      path = fs.makeQualified(path);
     }
     return path;
   }
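The replacement leans on Hadoop's own path qualification: FileSystem.makeQualified(Path) fills in the scheme and authority from the filesystem's URI and resolves relative paths against its working directory, instead of hand-building a URI string the way the removed line did. A small sketch against the local filesystem follows; the class name and paths are illustrative only, not from the PR.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeQualifiedSketch
{
  public static void main(String[] args) throws Exception
  {
    // The local filesystem stands in for HDFS here; on HDFS the result
    // would carry an hdfs:// scheme and the namenode authority instead.
    FileSystem fs = FileSystem.getLocal(new Configuration());

    Path noScheme = new Path("/tmp/druid/segments/part-0"); // absolute, but no scheme
    Path qualified = fs.makeQualified(noScheme);

    // Prints the fully qualified path, e.g. file:/tmp/druid/segments/part-0
    System.out.println(qualified);
  }
}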