Merge pull request #1761 from gianm/local-path-for-hadoop

LocalDataSegmentPusher: Fix for Hadoop + relative paths.
This commit is contained in:
Xavier Léauté 2015-09-22 08:57:44 -07:00
commit 35caa753aa
2 changed files with 46 additions and 13 deletions

View File

@@ -54,7 +54,7 @@ public class LocalDataSegmentPusher implements DataSegmentPusher
   @Override
   public String getPathForHadoop(String dataSource)
   {
-    return String.format("file://%s/%s", config.getStorageDirectory(), dataSource);
+    return new File(config.getStorageDirectory().getAbsoluteFile(), dataSource).toURI().toString();
   }

   @Override

View File

@@ -45,7 +45,8 @@ public class LocalDataSegmentPusherTest
   @Before
   public void setUp() throws IOException
   {
-    dataSegment = new DataSegment("",
+    dataSegment = new DataSegment(
+        "",
         new Interval(0, 1),
         "",
         null,
@@ -53,7 +54,8 @@ public class LocalDataSegmentPusherTest
         null,
         new NoneShardSpec(),
         null,
-        -1);
+        -1
+    );
     localDataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig(), new ObjectMapper());
     dataSegmentFiles = Files.createTempDir();
     ByteStreams.write(
@@ -72,16 +74,47 @@ public class LocalDataSegmentPusherTest
     DataSegment returnSegment = localDataSegmentPusher.push(dataSegmentFiles, dataSegment);
     Assert.assertNotNull(returnSegment);
     Assert.assertEquals(dataSegment, returnSegment);
-    outDir = new File(new LocalDataSegmentPusherConfig().getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(returnSegment));
+    outDir = new File(
+        new LocalDataSegmentPusherConfig().getStorageDirectory(),
+        DataSegmentPusherUtil.getStorageDir(returnSegment)
+    );
     File versionFile = new File(outDir, "index.zip");
     File descriptorJson = new File(outDir, "descriptor.json");
     Assert.assertTrue(versionFile.exists());
     Assert.assertTrue(descriptorJson.exists());
   }
+  @Test
+  public void testPathForHadoopAbsolute()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("/druid");
+    Assert.assertEquals(
+        "file:/druid/foo",
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
+
+  @Test
+  public void testPathForHadoopRelative()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("druid");
+    Assert.assertEquals(
+        String.format("file:%s/druid/foo", System.getProperty("user.dir")),
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
   @After
-  public void tearDown() throws IOException{
+  public void tearDown() throws IOException
+  {
     FileUtils.deleteDirectory(dataSegmentFiles);
+
+    if (outDir != null) {
-    FileUtils.deleteDirectory(outDir);
+      FileUtils.deleteDirectory(outDir);
+    }
   }
 }