HDFS-5936. MiniDFSCluster does not clean data left behind by SecondaryNameNode. Contributed by Binglin Chang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1572150 13f79535-47bb-0310-9956-ffa450edef68
commit 5f9cdbd24f
parent 7be2c002b3
@@ -631,6 +631,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-6008. Namenode dead node link is giving HTTP error 500.
     (Benoy Antony via cnauroth)
 
+    HDFS-5936. MiniDFSCluster does not clean data left behind by
+    SecondaryNameNode. (Binglin Chang via cnauroth)
+
 Release 2.3.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -79,6 +79,7 @@
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.Storage;
+import org.apache.hadoop.hdfs.server.common.Util;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.datanode.DataStorage;
@@ -813,6 +814,14 @@ private void createNameNodesAndSetConf(MiniDFSNNTopology nnTopology,
             throw new IOException("Could not fully delete " + nameDir);
           }
         }
+        Collection<URI> checkpointDirs = Util.stringCollectionAsURIs(conf
+            .getTrimmedStringCollection(DFS_NAMENODE_CHECKPOINT_DIR_KEY));
+        for (URI checkpointDirUri : checkpointDirs) {
+          File checkpointDir = new File(checkpointDirUri);
+          if (checkpointDir.exists() && !FileUtil.fullyDelete(checkpointDir)) {
+            throw new IOException("Could not fully delete " + checkpointDir);
+          }
+        }
       }
 
       boolean formatThisOne = format;
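For context, a minimal illustrative sketch of the situation this patch addresses, modeled on the existing HDFS checkpoint tests; the test class and method names below are hypothetical and not part of this change, only the MiniDFSCluster and SecondaryNameNode calls reflect the real test APIs:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode;

public class TestCheckpointDirCleanup {  // hypothetical test class
  public void testSecondaryDataIsCleaned() throws Exception {
    Configuration conf = new HdfsConfiguration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    SecondaryNameNode secondary = null;
    try {
      cluster.waitActive();
      // The 2NN checkpoints into the dfs.namenode.checkpoint.dir locations
      // that MiniDFSCluster configured under its base directory.
      secondary = new SecondaryNameNode(conf);
      secondary.doCheckpoint();
    } finally {
      if (secondary != null) {
        secondary.shutdown();
      }
      cluster.shutdown();
    }
    // With this change, building a new cluster with format=true (the default)
    // also deletes those checkpoint directories, so no stale 2NN data leaks
    // into the next test run.
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.shutdown();
  }
}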