HDFS-8573. Move creation of restartMeta file logic from BlockReceiver to ReplicaInPipeline. Contributed by Eddy Xu.

Andrew Wang 2015-06-11 10:12:31 -07:00
parent 95c73d49b1
commit b258b344bb
6 changed files with 40 additions and 10 deletions

CHANGES.txt

@@ -620,6 +620,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8549. Abort the balancer if an upgrade is in progress. (wang)
 
+    HDFS-8573. Move creation of restartMeta file logic from BlockReceiver to
+    ReplicaInPipeline. (Eddy Xu via wang)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

BlockReceiver.java

@@ -23,7 +23,6 @@ import java.io.BufferedOutputStream;
 import java.io.Closeable;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -839,15 +838,8 @@ class BlockReceiver implements Closeable {
       // In case this datanode is shutting down for quick restart,
       // send a special ack upstream.
       if (datanode.isRestarting() && isClient && !isTransfer) {
-        File blockFile = ((ReplicaInPipeline)replicaInfo).getBlockFile();
-        File restartMeta = new File(blockFile.getParent() +
-            File.pathSeparator + "." + blockFile.getName() + ".restart");
-        if (restartMeta.exists() && !restartMeta.delete()) {
-          LOG.warn("Failed to delete restart meta file: " +
-              restartMeta.getPath());
-        }
         try (Writer out = new OutputStreamWriter(
-            new FileOutputStream(restartMeta), "UTF-8")) {
+            replicaInfo.createRestartMetaStream(), "UTF-8")) {
           // write out the current time.
           out.write(Long.toString(Time.now() + restartBudget));
           out.flush();
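To make the new call concrete, the following stand-alone sketch (not Hadoop code) mirrors the write path BlockReceiver now takes: it obtains an opaque OutputStream from the replica and writes the restart deadline as UTF-8 text. The class name, the ByteArrayOutputStream standing in for the stream returned by createRestartMetaStream(), the restartBudget value, and System.currentTimeMillis() (used here in place of Hadoop's Time.now()) are all illustrative assumptions.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

public class RestartMetaWriteSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical restart budget in milliseconds; the real value comes
    // from datanode configuration.
    long restartBudget = 30_000L;

    // In the patched code this stream comes from
    // replicaInfo.createRestartMetaStream(); an in-memory stream stands in
    // for it here so the example runs anywhere.
    OutputStream restartMetaStream = new ByteArrayOutputStream();

    try (Writer out = new OutputStreamWriter(restartMetaStream, "UTF-8")) {
      // Same payload BlockReceiver writes: the restart deadline
      // ("now" plus the budget) as decimal text.
      out.write(Long.toString(System.currentTimeMillis() + restartBudget));
      out.flush();
    }

    System.out.println("restart marker payload: " + restartMetaStream);
  }
}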

ReplicaInPipeline.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.io.RandomAccessFile;
 
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -288,6 +289,18 @@ public class ReplicaInPipeline extends ReplicaInfo
     }
   }
 
+  @Override
+  public OutputStream createRestartMetaStream() throws IOException {
+    File blockFile = getBlockFile();
+    File restartMeta = new File(blockFile.getParent() +
+        File.pathSeparator + "." + blockFile.getName() + ".restart");
+    if (restartMeta.exists() && !restartMeta.delete()) {
+      DataNode.LOG.warn("Failed to delete restart meta file: " +
+          restartMeta.getPath());
+    }
+    return new FileOutputStream(restartMeta);
+  }
+
   @Override
   public String toString() {
     return super.toString()

ReplicaInPipelineInterface.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.IOException;
+import java.io.OutputStream;
 
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaOutputStreams;
 import org.apache.hadoop.util.DataChecksum;
@@ -73,4 +74,13 @@ public interface ReplicaInPipelineInterface extends Replica {
    */
   public ReplicaOutputStreams createStreams(boolean isCreate,
       DataChecksum requestedChecksum) throws IOException;
+
+  /**
+   * Create an output stream to write restart metadata in case of datanode
+   * shutting down for quick restart.
+   *
+   * @return output stream for writing.
+   * @throws IOException if any error occurs
+   */
+  public OutputStream createRestartMetaStream() throws IOException;
 }
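The marker written through this stream carries a single textual timestamp, the restart deadline. For illustration only, here is a stand-alone sketch of the counterpart read-and-check step; the recovery-side code is not part of this commit, and the in-memory input, class name, and 30-second budget below are assumptions.

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class RestartMetaCheckSketch {
  public static void main(String[] args) throws IOException {
    // Pretend these bytes were written through createRestartMetaStream():
    // a single decimal timestamp marking the restart deadline.
    byte[] marker = Long.toString(System.currentTimeMillis() + 30_000L)
        .getBytes(StandardCharsets.UTF_8);

    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        new ByteArrayInputStream(marker), StandardCharsets.UTF_8))) {
      // Parse the deadline and decide whether the restart window is still open.
      long deadline = Long.parseLong(in.readLine().trim());
      boolean withinRestartWindow = System.currentTimeMillis() <= deadline;
      System.out.println("within restart window: " + withinRestartWindow);
    }
  }
}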

SimulatedFSDataset.java

@@ -262,6 +262,11 @@ public class SimulatedFSDataset implements FsDatasetSpi<FsVolumeSpi> {
       }
     }
 
+    @Override
+    public OutputStream createRestartMetaStream() throws IOException {
+      return new SimulatedOutputStream();
+    }
+
     @Override
     synchronized public long getBlockId() {
       return theBlock.getBlockId();

ExternalReplicaInPipeline.java

@@ -19,6 +19,8 @@
 package org.apache.hadoop.hdfs.server.datanode.extdataset;
 
 import java.io.IOException;
+import java.io.OutputStream;
 
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.ChunkChecksum;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaInPipelineInterface;
@@ -59,6 +61,11 @@ public class ExternalReplicaInPipeline implements ReplicaInPipelineInterface {
     return new ReplicaOutputStreams(null, null, requestedChecksum, false);
   }
 
+  @Override
+  public OutputStream createRestartMetaStream() throws IOException {
+    return null;
+  }
+
   @Override
   public long getBlockId() {
     return 0;