HDFS-13101. Yet another fsimage corruption related to snapshot. Contributed by Shashikant Banerjee.
commit 0a85af959c
parent 167acd87da
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeature;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
+import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.util.Diff;
@@ -646,6 +647,18 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
     return parent == null || !parent.isReference()? null: (INodeReference)parent;
   }
 
+  /**
+   * @return true if this is a reference and the reference count is 1;
+   *         otherwise, return false.
+   */
+  public boolean isLastReference() {
+    final INodeReference ref = getParentReference();
+    if (!(ref instanceof WithCount)) {
+      return false;
+    }
+    return ((WithCount)ref).getReferenceCount() == 1;
+  }
+
   /** Set parent directory */
   public final void setParent(INodeDirectory parent) {
     this.parent = parent;
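Note: isLastReference() returns true only when the inode sits behind an
INodeReference.WithCount whose count has dropped to 1, i.e. after renames this is
the only remaining reference to the subtree. A minimal standalone sketch of the
same check, using hypothetical stand-in classes rather than the real
INodeReference hierarchy:

    // RefWithCount and ToyINode are illustrative stand-ins only, not the
    // real INodeReference/INode classes.
    class RefWithCount {
      private final int referenceCount;
      RefWithCount(int count) { this.referenceCount = count; }
      int getReferenceCount() { return referenceCount; }
    }

    class ToyINode {
      private final Object parent; // a plain parent dir, or a RefWithCount

      ToyINode(Object parent) { this.parent = parent; }

      boolean isLastReference() {
        if (!(parent instanceof RefWithCount)) {
          return false; // not behind a counted reference at all
        }
        return ((RefWithCount) parent).getReferenceCount() == 1;
      }

      public static void main(String[] args) {
        System.out.println(new ToyINode(new RefWithCount(1)).isLastReference()); // true
        System.out.println(new ToyINode(new RefWithCount(2)).isLastReference()); // false
        System.out.println(new ToyINode("plainDir").isLastReference());          // false
      }
    }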
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java

@@ -903,6 +903,14 @@ public class INodeDirectory extends INodeWithAdditionalFields
       prefix.setLength(prefix.length() - 2);
       prefix.append("  ");
     }
+
+    final DirectoryWithSnapshotFeature snapshotFeature =
+        getDirectoryWithSnapshotFeature();
+    if (snapshotFeature != null) {
+      out.print(prefix);
+      out.print(snapshotFeature);
+    }
+    out.println();
     dumpTreeRecursively(out, prefix, new Iterable<SnapshotAndINode>() {
       final Iterator<INode> i = getChildrenList(snapshot).iterator();
 
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java

@@ -1046,6 +1046,18 @@ public class INodeFile extends INodeWithAdditionalFields
     out.print(", blocks=");
     out.print(blocks.length == 0 ? null: blocks[0]);
     out.println();
+
+    final FileWithSnapshotFeature snapshotFeature =
+        getFileWithSnapshotFeature();
+    if (snapshotFeature != null) {
+      if (prefix.length() >= 2) {
+        prefix.setLength(prefix.length() - 2);
+        prefix.append("  ");
+      }
+      out.print(prefix);
+      out.print(snapshotFeature);
+    }
+    out.println();
   }
 
   /**
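Note: both dump changes make dumpTreeRecursively() emit the snapshot feature
(and, via the toString() overrides added below, its diff list) for directories
and files. The before/after tree comparison in the new test relies on this: a
diff list that survives when it should have been destroyed now shows up as a
mismatch in the dumped output instead of only surfacing later as a corrupt
fsimage.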
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java

@@ -314,6 +314,19 @@ abstract class AbstractINodeDiffList<N extends INode,
 
   @Override
   public String toString() {
-    return getClass().getSimpleName() + ": " + (diffs != null ? diffs : "[]");
+    if (diffs != null) {
+      final StringBuilder b =
+          new StringBuilder(getClass().getSimpleName()).append("@")
+              .append(Integer.toHexString(hashCode())).append(": ");
+      b.append("[");
+      for (D d : diffs) {
+        b.append(d).append(", ");
+      }
+      b.setLength(b.length() - 2);
+      b.append("]");
+      return b.toString();
+    } else {
+      return "";
+    }
   }
 }
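Note: the rewritten toString() tags the list with an identity hash
(ClassName@hex: [...]) and joins the diff entries manually. A self-contained
sketch of the same join pattern; the committed version only runs it when diffs
!= null, and its setLength(b.length() - 2) trim assumes a trailing ", " is
present, so this sketch guards the empty case explicitly (names here are
illustrative, not from the commit):

    import java.util.Arrays;
    import java.util.List;

    class JoinDemo {
      // Same bracketed, comma-joined shape as the new toString().
      static String join(String tag, List<?> items) {
        final StringBuilder b = new StringBuilder(tag).append(": [");
        for (Object d : items) {
          b.append(d).append(", ");
        }
        if (!items.isEmpty()) {
          b.setLength(b.length() - 2); // drop the trailing ", "
        }
        return b.append("]").toString();
      }

      public static void main(String[] args) {
        // prints "FileDiffList@1a2b: [d1, d2]"
        System.out.println(join("FileDiffList@1a2b", Arrays.asList("d1", "d2")));
      }
    }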
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java

@@ -739,14 +739,20 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
       // were created before "prior" will be covered by the later
       // cleanSubtreeRecursively call.
       if (priorCreated != null) {
-        // we only check the node originally in prior's created list
-        for (INode cNode : priorDiff.diff.getCreatedUnmodifiable()) {
-          if (priorCreated.containsKey(cNode)) {
-            cNode.cleanSubtree(reclaimContext, snapshot, NO_SNAPSHOT_ID);
+        if (currentINode.isLastReference()) {
+          // if this is the last reference, the created list can be
+          // destroyed.
+          priorDiff.getChildrenDiff().destroyCreatedList(
+              reclaimContext, currentINode);
+        } else {
+          // we only check the node originally in prior's created list
+          for (INode cNode : priorDiff.diff.getCreatedUnmodifiable()) {
+            if (priorCreated.containsKey(cNode)) {
+              cNode.cleanSubtree(reclaimContext, snapshot, NO_SNAPSHOT_ID);
+            }
           }
         }
       }
 
       // When a directory is moved from the deleted list of the posterior
       // diff to the deleted list of this diff, we need to destroy its
       // descendants that were 1) created after taking this diff and 2)
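Note: this branch is the functional core of the fix. Per the new comment, when
currentINode is the last remaining reference (nothing else can reach the
subtree after the renames), the whole created list of the prior diff is
destroyed outright instead of cleaning only the nodes found in priorCreated,
which is the old behavior the commit summary ties to fsimage corruption. A
standalone sketch of the branching, with toy types standing in for the HDFS
classes:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Toy stand-ins: only the if/else shape mirrors the committed change.
    class CleanupSketch {
      static void cleanDeleted(boolean lastReference, List<String> createdList,
          Set<String> priorCreated) {
        if (lastReference) {
          // Last reference: no one else can reach these nodes, so the
          // whole created list is destroyed in one shot.
          createdList.clear();
        } else {
          // Otherwise, clean only nodes originally in prior's created list.
          for (String c : createdList) {
            if (priorCreated.contains(c)) {
              System.out.println("cleanSubtree(" + c + ")");
            }
          }
        }
      }

      public static void main(String[] args) {
        cleanDeleted(false, new ArrayList<>(Arrays.asList("a", "b")),
            new HashSet<>(Arrays.asList("a"))); // prints cleanSubtree(a)
      }
    }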
@@ -770,4 +776,9 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
       reclaimContext.quotaDelta().addQuotaDirUpdate(currentINode, current);
     }
   }
+
+  @Override
+  public String toString() {
+    return "" + diffs;
+  }
 }
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FileWithSnapshotFeature.java

@@ -225,4 +225,9 @@ public class FileWithSnapshotFeature implements INode.Feature {
     file.collectBlocksBeyondSnapshot(snapshotBlocks,
         reclaimContext.collectedBlocks());
   }
+
+  @Override
+  public String toString() {
+    return "" + diffs;
+  }
 }
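Note: these two identical one-line toString() overrides ("" + diffs) exist so
that printing a snapshot feature in the tree dump (see the dumpTreeRecursively()
changes above) renders its diff list rather than the default Object identity
string.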
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java

@@ -22,6 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
@@ -64,7 +65,7 @@ public class TestFSImageWithSnapshot {
   }
 
   static final long seed = 0;
-  static final short NUM_DATANODES = 3;
+  static final short NUM_DATANODES = 1;
   static final int BLOCKSIZE = 1024;
   static final long txid = 1;
 
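Note: NUM_DATANODES drops from 3 to 1. The new test restarts the mini-cluster
twice and exercises namespace/snapshot metadata rather than block content, so a
single datanode keeps it cheap; the createFile() helper also uses this constant
as the replication factor.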
@@ -511,4 +512,101 @@ public class TestFSImageWithSnapshot {
     fsn = cluster.getNamesystem();
     hdfs = cluster.getFileSystem();
   }
+
+  void rename(Path src, Path dst) throws Exception {
+    printTree("Before rename " + src + " -> " + dst);
+    hdfs.rename(src, dst);
+    printTree("After rename " + src + " -> " + dst);
+  }
+
+  void createFile(Path directory, String filename) throws Exception {
+    final Path f = new Path(directory, filename);
+    DFSTestUtil.createFile(hdfs, f, 0, NUM_DATANODES, seed);
+  }
+
+  void appendFile(Path directory, String filename) throws Exception {
+    final Path f = new Path(directory, filename);
+    DFSTestUtil.appendFile(hdfs, f, "more data");
+    printTree("appended " + f);
+  }
+
+  void deleteSnapshot(Path directory, String snapshotName) throws Exception {
+    hdfs.deleteSnapshot(directory, snapshotName);
+    printTree("deleted snapshot " + snapshotName);
+  }
+
+  @Test (timeout=60000)
+  public void testDoubleRename() throws Exception {
+    final Path parent = new Path("/parent");
+    hdfs.mkdirs(parent);
+    final Path sub1 = new Path(parent, "sub1");
+    final Path sub1foo = new Path(sub1, "foo");
+    hdfs.mkdirs(sub1);
+    hdfs.mkdirs(sub1foo);
+    createFile(sub1foo, "file0");
+
+    printTree("before s0");
+    hdfs.allowSnapshot(parent);
+    hdfs.createSnapshot(parent, "s0");
+
+    createFile(sub1foo, "file1");
+    createFile(sub1foo, "file2");
+
+    final Path sub2 = new Path(parent, "sub2");
+    hdfs.mkdirs(sub2);
+    final Path sub2foo = new Path(sub2, "foo");
+    // mv /parent/sub1/foo to /parent/sub2/foo
+    rename(sub1foo, sub2foo);
+
+    hdfs.createSnapshot(parent, "s1");
+    hdfs.createSnapshot(parent, "s2");
+    printTree("created snapshots: s1, s2");
+
+    appendFile(sub2foo, "file1");
+    createFile(sub2foo, "file3");
+
+    final Path sub3 = new Path(parent, "sub3");
+    hdfs.mkdirs(sub3);
+    // mv /parent/sub2/foo to /parent/sub3/foo
+    rename(sub2foo, sub3);
+
+    hdfs.delete(sub3, true);
+    printTree("deleted " + sub3);
+
+    deleteSnapshot(parent, "s1");
+    restartCluster();
+
+    deleteSnapshot(parent, "s2");
+    restartCluster();
+  }
+
+  void restartCluster() throws Exception {
+    final File before = dumpTree2File("before.txt");
+
+    hdfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
+    hdfs.saveNamespace();
+    hdfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
+
+    cluster.shutdown();
+    cluster = new MiniDFSCluster.Builder(conf).format(false)
+        .numDataNodes(NUM_DATANODES).build();
+    cluster.waitActive();
+    fsn = cluster.getNamesystem();
+    hdfs = cluster.getFileSystem();
+    final File after = dumpTree2File("after.txt");
+    SnapshotTestHelper.compareDumpedTreeInFile(before, after, true);
+  }
+
+  private final PrintWriter output = new PrintWriter(System.out, true);
+  private int printTreeCount = 0;
+
+  String printTree(String label) throws Exception {
+    output.println();
+    output.println();
+    output.println("***** " + printTreeCount++ + ": " + label);
+    final String b =
+        fsn.getFSDirectory().getINode("/").dumpTreeRecursively().toString();
+    output.println(b);
+    return b;
+  }
 }
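Note: testDoubleRename replays the corrupting sequence behind this commit:
/parent/sub1/foo picks up files across snapshot s0, is renamed to
/parent/sub2/foo, captured again in s1 and s2, modified, renamed once more into
/parent/sub3, and then deleted. Each deleteSnapshot() is followed by
restartCluster(), which saves the namespace, reloads it from the fsimage, and
asserts the dumped tree is unchanged, so a corrupted image fails the test
immediately rather than on some later restart. To run it alone, something like
mvn test -Dtest=TestFSImageWithSnapshot#testDoubleRename from
hadoop-hdfs-project/hadoop-hdfs should work (invocation assumed, not part of
the commit).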
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java

@@ -245,8 +245,7 @@ public class SnapshotTestHelper {
             "\\{blockUCState=\\w+, primaryNodeIndex=[-\\d]+, replicas=\\[\\]\\}",
             "");
       }
-      assertEquals(line1.trim(), line2.trim());
-
+      assertEquals(line1, line2);
     }
     Assert.assertNull(reader1.readLine());
     Assert.assertNull(reader2.readLine());
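Note: dropping the trim() makes compareDumpedTreeInFile() whitespace-exact.
Indentation is how the dump encodes tree depth (see the prefix handling in the
dumpTreeRecursively() changes above), so a depth difference between the before-
and after-restart dumps now fails the assertion instead of being masked.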