diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java
index cd7d46ef69f..5232b01b7a7 100644
--- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java
+++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPusher.java
@@ -54,7 +54,7 @@ public class LocalDataSegmentPusher implements DataSegmentPusher
   @Override
   public String getPathForHadoop(String dataSource)
   {
-    return String.format("file://%s/%s", config.getStorageDirectory(), dataSource);
+    return new File(config.getStorageDirectory().getAbsoluteFile(), dataSource).toURI().toString();
   }
 
   @Override
diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java
index 47d6b5c76ab..7533aee4d55 100644
--- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java
+++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java
@@ -45,15 +45,17 @@ public class LocalDataSegmentPusherTest
   @Before
   public void setUp() throws IOException
   {
-    dataSegment = new DataSegment("",
-                                  new Interval(0, 1),
-                                  "",
-                                  null,
-                                  null,
-                                  null,
-                                  new NoneShardSpec(),
-                                  null,
-                                  -1);
+    dataSegment = new DataSegment(
+        "",
+        new Interval(0, 1),
+        "",
+        null,
+        null,
+        null,
+        new NoneShardSpec(),
+        null,
+        -1
+    );
     localDataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig(), new ObjectMapper());
     dataSegmentFiles = Files.createTempDir();
     ByteStreams.write(
@@ -72,16 +74,47 @@ public class LocalDataSegmentPusherTest
     DataSegment returnSegment = localDataSegmentPusher.push(dataSegmentFiles, dataSegment);
     Assert.assertNotNull(returnSegment);
     Assert.assertEquals(dataSegment, returnSegment);
-    outDir = new File(new LocalDataSegmentPusherConfig().getStorageDirectory(), DataSegmentPusherUtil.getStorageDir(returnSegment));
+    outDir = new File(
+        new LocalDataSegmentPusherConfig().getStorageDirectory(),
+        DataSegmentPusherUtil.getStorageDir(returnSegment)
+    );
     File versionFile = new File(outDir, "index.zip");
     File descriptorJson = new File(outDir, "descriptor.json");
     Assert.assertTrue(versionFile.exists());
     Assert.assertTrue(descriptorJson.exists());
   }
 
+  @Test
+  public void testPathForHadoopAbsolute()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("/druid");
+
+    Assert.assertEquals(
+        "file:/druid/foo",
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
+
+  @Test
+  public void testPathForHadoopRelative()
+  {
+    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
+    config.storageDirectory = new File("druid");
+
+    Assert.assertEquals(
+        String.format("file:%s/druid/foo", System.getProperty("user.dir")),
+        new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop("foo")
+    );
+  }
+
   @After
-  public void tearDown() throws IOException{
+  public void tearDown() throws IOException
+  {
     FileUtils.deleteDirectory(dataSegmentFiles);
-    FileUtils.deleteDirectory(outDir);
+
+    if (outDir != null) {
+      FileUtils.deleteDirectory(outDir);
+    }
   }
 }