Using fully qualified hdfs path. (#3705)

* Using fully qualified hdfs path.

* Review changes.

* Remove unused imports.

* Variable name change.
Akash Dwivedi 2017-01-17 12:40:22 -08:00 committed by Himanshu
parent 558dc365a4
commit e550d48772
2 changed files with 21 additions and 10 deletions
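
Note (not part of the patch): the heart of the change is that the pusher now resolves the configured storage directory (druid.storage.storageDirectory) into a fully qualified URI once, in the constructor, via FileSystem.makeQualified, and uses that string everywhere a segment path is built. Below is a minimal sketch of what the qualification does, assuming a hypothetical cluster whose default filesystem is hdfs://namenode:9000; the class name and namenode address are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class QualifiedPathExample
    {
      public static void main(String[] args) throws Exception
      {
        Configuration conf = new Configuration();
        // Hypothetical default filesystem; in a real deployment this comes from core-site.xml.
        conf.set("fs.defaultFS", "hdfs://namenode:9000");

        // A scheme-less storage directory, as druid.storage.storageDirectory is often configured.
        Path storageDir = new Path("/druid/segments");

        // makeQualified resolves the path against the filesystem's URI and working directory,
        // yielding "hdfs://namenode:9000/druid/segments" instead of the bare "/druid/segments".
        String fullyQualified = FileSystem.newInstance(conf)
                                          .makeQualified(storageDir)
                                          .toUri()
                                          .toString();
        System.out.println(fullyQualified);
      }
    }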


@@ -50,17 +50,20 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
   private final HdfsDataSegmentPusherConfig config;
   private final Configuration hadoopConfig;
   private final ObjectMapper jsonMapper;
+  private final String fullyQualifiedStorageDirectory;
 
   @Inject
   public HdfsDataSegmentPusher(
       HdfsDataSegmentPusherConfig config,
       Configuration hadoopConfig,
       ObjectMapper jsonMapper
-  )
+  ) throws IOException
   {
     this.config = config;
     this.hadoopConfig = hadoopConfig;
     this.jsonMapper = jsonMapper;
+    this.fullyQualifiedStorageDirectory = FileSystem.newInstance(hadoopConfig).makeQualified(new Path(config.getStorageDirectory()))
+                                                    .toUri().toString();
 
     log.info("Configured HDFS as deep storage");
   }
@@ -75,7 +78,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
   @Override
   public String getPathForHadoop()
   {
-    return new Path(config.getStorageDirectory()).toUri().toString();
+    return fullyQualifiedStorageDirectory;
   }
 
   @Override
@@ -86,13 +89,13 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
     log.info(
         "Copying segment[%s] to HDFS at location[%s/%s]",
         segment.getIdentifier(),
-        config.getStorageDirectory(),
+        fullyQualifiedStorageDirectory,
         storageDir
     );
 
     Path tmpFile = new Path(String.format(
         "%s/%s/index.zip",
-        config.getStorageDirectory(),
+        fullyQualifiedStorageDirectory,
         UUIDUtils.generateUuid()
     ));
     FileSystem fs = tmpFile.getFileSystem(hadoopConfig);
@@ -104,7 +107,11 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
     final DataSegment dataSegment;
     try (FSDataOutputStream out = fs.create(tmpFile)) {
       size = CompressionUtils.zip(inDir, out);
-      final Path outFile = new Path(String.format("%s/%s/index.zip", config.getStorageDirectory(), storageDir));
+      final Path outFile = new Path(String.format(
+          "%s/%s/index.zip",
+          fullyQualifiedStorageDirectory,
+          storageDir
+      ));
       final Path outDir = outFile.getParent();
       dataSegment = createDescriptorFile(
           segment.withLoadSpec(makeLoadSpec(outFile))
@@ -131,12 +138,14 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
           ));
         }
       }
-    } finally {
+    }
+    finally {
       try {
         if (fs.exists(tmpFile.getParent()) && !fs.delete(tmpFile.getParent(), true)) {
           log.error("Failed to delete temp directory[%s]", tmpFile.getParent());
         }
-      } catch(IOException ex) {
+      }
+      catch (IOException ex) {
         log.error(ex, "Failed to delete temp directory[%s]", tmpFile.getParent());
       }
     }


@@ -28,6 +28,8 @@ import io.druid.segment.loading.DataSegmentPusherUtil
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.NoneShardSpec;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Rule;
@@ -57,8 +59,8 @@ public class HdfsDataSegmentPusherTest
   @Test
   public void testPushWithBadScheme() throws Exception
   {
-    expectedException.expect(IOException.class);
-    expectedException.expectMessage("No FileSystem for scheme: xyzzy");
+    expectedException.expect(IllegalArgumentException.class);
+    expectedException.expectMessage("Wrong FS");
     testUsingScheme("xyzzy");
 
     // Not reached
@@ -115,7 +117,7 @@ public class HdfsDataSegmentPusherTest
         "path",
         String.format(
             "%s/%s/index.zip",
-            config.getStorageDirectory(),
+            FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(),
             DataSegmentPusherUtil.getHdfsStorageDir(segmentToPush)
         )
     ), segment.getLoadSpec());
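
Note on the test change above (not part of the patch): once the storage directory is qualified against the configured filesystem in the constructor, pushing with a mismatched scheme such as xyzzy:// fails fast in Hadoop's FileSystem.checkPath with IllegalArgumentException("Wrong FS: ...") instead of failing later with IOException("No FileSystem for scheme: xyzzy"). A small sketch of that Hadoop behavior; the class name is illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class WrongFsExample
    {
      public static void main(String[] args) throws Exception
      {
        // A default test Configuration resolves to the local filesystem (file:///).
        FileSystem fs = FileSystem.newInstance(new Configuration());

        // Qualifying a path whose scheme does not match the filesystem trips
        // FileSystem.checkPath, which throws:
        //   IllegalArgumentException: Wrong FS: xyzzy://host/druid/segments, expected: file:///
        fs.makeQualified(new Path("xyzzy://host/druid/segments"));
      }
    }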