mirror of https://github.com/apache/druid.git
Merge pull request #1761 from gianm/local-path-for-hadoop
LocalDataSegmentPusher: Fix for Hadoop + relative paths.
commit 35caa753aa
@@ -54,7 +54,7 @@ public class LocalDataSegmentPusher implements DataSegmentPusher
   @Override
   public String getPathForHadoop(String dataSource)
   {
-    return String.format("file://%s/%s", config.getStorageDirectory(), dataSource);
+    return new File(config.getStorageDirectory().getAbsoluteFile(), dataSource).toURI().toString();
   }
 
   @Override
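The substance of the change above: the old code built the Hadoop path by string formatting, which produces a malformed file URI whenever the configured storage directory is a relative path; the new code resolves the directory to an absolute File first and lets File.toURI() build the URI. Below is a minimal, standalone sketch of the difference (the class name FileUriDemo and the sample segment name "foo" are illustrative only, not part of the patch):

import java.io.File;
import java.net.URI;

public class FileUriDemo
{
  public static void main(String[] args)
  {
    File relativeStorage = new File("druid");   // a relative storage directory, as in testPathForHadoopRelative

    // Old construction: plain string formatting. With a relative directory this
    // prints "file://druid/foo", and a URI parser reads "druid" as the authority
    // (host name), not as part of the path.
    String oldStyle = String.format("file://%s/%s", relativeStorage, "foo");
    System.out.println(oldStyle);                              // file://druid/foo
    System.out.println(URI.create(oldStyle).getAuthority());   // druid
    System.out.println(URI.create(oldStyle).getPath());        // /foo

    // New construction: resolve against the working directory first, then let
    // File.toURI() produce a well-formed absolute URI such as
    // "file:/<working dir>/druid/foo".
    String newStyle = new File(relativeStorage.getAbsoluteFile(), "foo").toURI().toString();
    System.out.println(newStyle);
  }
}

The two tests added below, testPathForHadoopAbsolute and testPathForHadoopRelative, pin down exactly these cases: an absolute storageDirectory of /druid yields "file:/druid/foo", and a relative one is resolved against the JVM working directory.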
@@ -45,7 +45,8 @@ public class LocalDataSegmentPusherTest
   @Before
   public void setUp() throws IOException
   {
-    dataSegment = new DataSegment("",
+    dataSegment = new DataSegment(
+        "",
         new Interval(0, 1),
         "",
         null,
@@ -53,7 +54,8 @@ public class LocalDataSegmentPusherTest
         null,
         new NoneShardSpec(),
         null,
-        -1);
+        -1
+    );
     localDataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig(), new ObjectMapper());
     dataSegmentFiles = Files.createTempDir();
     ByteStreams.write(
@@ -72,16 +74,47 @@ public class LocalDataSegmentPusherTest
     DataSegment returnSegment = localDataSegmentPusher.push(dataSegmentFiles, dataSegment);
     Assert.assertNotNull(returnSegment);
     Assert.assertEquals(dataSegment, returnSegment);
-    outDir = new File(new LocalDataSegmentPusherConfig().getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(returnSegment));
+    outDir = new File(
+        new LocalDataSegmentPusherConfig().getStorageDirectory(),
+        DataSegmentPusherUtil.getStorageDir(returnSegment)
+    );
     File versionFile = new File(outDir, "index.zip");
     File descriptorJson = new File(outDir, "descriptor.json");
     Assert.assertTrue(versionFile.exists());
     Assert.assertTrue(descriptorJson.exists());
   }
 
+  @Test
+  public void testPathForHadoopAbsolute()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("/druid");
+
+    Assert.assertEquals(
+        "file:/druid/foo",
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
+
+  @Test
+  public void testPathForHadoopRelative()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("druid");
+
+    Assert.assertEquals(
+        String.format("file:%s/druid/foo", System.getProperty("user.dir")),
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
+
   @After
-  public void tearDown() throws IOException{
+  public void tearDown() throws IOException
+  {
     FileUtils.deleteDirectory(dataSegmentFiles);
+
+    if (outDir != null) {
+      FileUtils.deleteDirectory(outDir);
+    }
   }
 }
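A further illustrative check, not part of the patch, of why the well-formed absolute URI matters to downstream consumers: the value the fixed getPathForHadoop returns round-trips through java.net.URI into a usable java.io.File, whereas the old relative form is rejected outright because "druid" parses as an authority component.

import java.io.File;
import java.net.URI;

public class FileUriRoundTrip
{
  public static void main(String[] args)
  {
    // New form, matching what testPathForHadoopAbsolute expects.
    System.out.println(new File(URI.create("file:/druid/foo")));   // /druid/foo

    // Old form produced for a relative storage directory; java.io.File
    // refuses to construct a file from a URI that carries an authority.
    try {
      System.out.println(new File(URI.create("file://druid/foo")));
    }
    catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}

How any particular Hadoop version reacts to the old form is not something the patch asserts; the point is only that "file://druid/foo" does not mean what the old code intended.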