HDFS-16975. FileWithSnapshotFeature.isCurrentFileDeleted is not reloaded from FSImage. (#5546)
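A file that survives only inside a snapshot is marked deleted in the current state by FileWithSnapshotFeature.isCurrentFileDeleted, but that flag was not being restored when the namespace was reloaded from an FSImage: both image loaders attached the file diffs via addSnapshotFeature (or the now-removed INodeFile(INodeFile, FileDiffList) constructor), which appears to leave the flag at its default. The new INodeFile.loadSnapshotFeature, now used by both loaders, rebuilds the flag from the loaded namespace; the remaining changes extend the inode dump utilities so the state is visible when dumping the tree. A simplified view of the core logic, paraphrasing the INodeFile hunk further down:

    // Sketch only; the real method is loadSnapshotFeature in the INodeFile hunk below.
    FileWithSnapshotFeature sf = file.addSnapshotFeature(diffs);
    if (!file.isInCurrentState()) {  // inode no longer reachable in the current state
      sf.deleteCurrentFile();        // restore isCurrentFileDeleted = true on load
    }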

Author: Tsz-Wo Nicholas Sze, 2023-04-24 09:04:28 -07:00 (committed by GitHub)
parent 05e6dc19ea
commit dc78849f27
7 changed files with 40 additions and 49 deletions

File: FSImageFormat.java

@@ -798,7 +798,7 @@ public class FSImageFormat {
       if (underConstruction) {
         file.toUnderConstruction(clientName, clientMachine);
       }
-      return fileDiffs == null ? file : new INodeFile(file, fileDiffs);
+      return fileDiffs == null ? file : file.loadSnapshotFeature(fileDiffs);
     } else if (numBlocks == -1) {
       //directory

File: INode.java

@@ -871,7 +871,14 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
     long id = getId();
     return (int)(id^(id>>>32));
   }
 
+  @VisibleForTesting
+  public final StringBuilder dumpParentINodes() {
+    final StringBuilder b = parent == null? new StringBuilder()
+        : parent.dumpParentINodes().append("\n ");
+    return b.append(toDetailString());
+  }
+
   /**
    * Dump the subtree starting from this inode.
    * @return a text representation of the tree.
@@ -896,10 +903,17 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
   @VisibleForTesting
   public void dumpTreeRecursively(PrintWriter out, StringBuilder prefix,
       int snapshotId) {
+    dumpINode(out, prefix, snapshotId);
+  }
+
+  public void dumpINode(PrintWriter out, StringBuilder prefix,
+      int snapshotId) {
     out.print(prefix);
     out.print(" ");
     final String name = getLocalName();
-    out.print(name.isEmpty()? "/": name);
+    out.print(name != null && name.isEmpty()? "/": name);
+    out.print(", isInCurrentState? ");
+    out.print(isInCurrentState());
     out.print(" (");
     out.print(getObjectString());
     out.print("), ");

File: INodeFile.java

@@ -283,12 +283,6 @@ public class INodeFile extends INodeWithAdditionalFields
     setBlocks(that.blocks);
   }
 
-  public INodeFile(INodeFile that, FileDiffList diffs) {
-    this(that);
-    Preconditions.checkArgument(!that.isWithSnapshot());
-    this.addSnapshotFeature(diffs);
-  }
-
   /** @return true unconditionally. */
   @Override
   public final boolean isFile() {
@@ -458,7 +452,16 @@ public class INodeFile extends INodeWithAdditionalFields
     this.addFeature(sf);
     return sf;
   }
 
+  /** Used by FSImage. */
+  public INodeFile loadSnapshotFeature(FileDiffList diffs) {
+    final FileWithSnapshotFeature sf = addSnapshotFeature(diffs);
+    if (!isInCurrentState()) {
+      sf.deleteCurrentFile();
+    }
+    return this;
+  }
+
   /**
    * If feature list contains a {@link FileWithSnapshotFeature}, return it;
    * otherwise, return null.
@@ -1092,7 +1095,12 @@ public class INodeFile extends INodeWithAdditionalFields
   @Override
   public void dumpTreeRecursively(PrintWriter out, StringBuilder prefix,
       final int snapshotId) {
-    super.dumpTreeRecursively(out, prefix, snapshotId);
+    dumpINodeFile(out, prefix, snapshotId);
+  }
+
+  public void dumpINodeFile(PrintWriter out, StringBuilder prefix,
+      final int snapshotId) {
+    dumpINode(out, prefix, snapshotId);
     out.print(", fileSize=" + computeFileSize(snapshotId));
     // only compare the first block
     out.print(", blocks=");

File: FSImageFormatPBSnapshot.java

@@ -269,7 +269,7 @@ public class FSImageFormatPBSnapshot {
         }
         diffs.addFirst(diff);
       }
-      file.addSnapshotFeature(diffs);
+      file.loadSnapshotFeature(diffs);
       short repl = file.getPreferredBlockReplication();
       for (BlockInfo b : file.getBlocks()) {
         if (b.getReplication() < repl) {

File: FileWithSnapshotFeature.java

@@ -243,6 +243,6 @@ public class FileWithSnapshotFeature implements INode.Feature {
   @Override
   public String toString() {
-    return "" + diffs;
+    return "isCurrentFileDeleted? " + isCurrentFileDeleted + ", " + diffs;
   }
 }

File: NamespacePrintVisitor.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode.visitor;
 
 import org.apache.hadoop.util.Preconditions;
-import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.namenode.DirectoryWithQuotaFeature;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.INode;
@@ -29,7 +28,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeSymlink;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
-import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshotFeature;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 
 import java.io.PrintWriter;
@@ -63,7 +61,7 @@ public final class NamespacePrintVisitor implements NamespaceVisitor {
   }
 
   private final PrintWriter out;
-  private final StringBuffer prefix = new StringBuffer();
+  private final StringBuilder prefix = new StringBuilder();
 
   private NamespacePrintVisitor(PrintWriter out) {
     this.out = out;
@@ -74,39 +72,12 @@ public final class NamespacePrintVisitor implements NamespaceVisitor {
   }
 
   private void printINode(INode iNode, int snapshot) {
-    out.print(prefix);
-    out.print(" ");
-    final String name = iNode.getLocalName();
-    out.print(name != null && name.isEmpty()? "/": name);
-    out.print(" (");
-    out.print(iNode.getObjectString());
-    out.print("), ");
-    out.print(iNode.getParentString());
-    out.print(", " + iNode.getPermissionStatus(snapshot));
+    iNode.dumpINode(out, prefix, snapshot);
   }
 
   @Override
   public void visitFile(INodeFile file, int snapshot) {
-    printINode(file, snapshot);
-    out.print(", fileSize=" + file.computeFileSize(snapshot));
-    // print only the first block, if it exists
-    out.print(", blocks=");
-    final BlockInfo[] blocks = file.getBlocks();
-    out.print(blocks.length == 0 ? null: blocks[0]);
-    out.println();
-
-    final FileWithSnapshotFeature snapshotFeature
-        = file.getFileWithSnapshotFeature();
-    if (snapshotFeature != null) {
-      if (prefix.length() >= 2) {
-        prefix.setLength(prefix.length() - 2);
-        prefix.append(" ");
-      }
-      out.print(prefix);
-      out.print(snapshotFeature);
-    }
-    out.println();
+    file.dumpINodeFile(out, prefix, snapshot);
   }
 
   @Override

File: TestFSImageWithSnapshot.java

@@ -196,10 +196,8 @@ public class TestFSImageWithSnapshot {
     cluster.waitActive();
     fsn = cluster.getNamesystem();
     hdfs = cluster.getFileSystem();
-    INodeDirectory rootNode = fsn.dir.getINode4Write(root.toString())
-        .asDirectory();
+    final INodeDirectory rootNode = fsn.dir.getRoot();
     assertTrue("The children list of root should be empty",
         rootNode.getChildrenList(Snapshot.CURRENT_STATE_ID).isEmpty());
     // one snapshot on root: s1
     DiffList<DirectoryDiff> diffList = rootNode.getDiffs().asList();
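
The symptom is easiest to see in a save/reload round trip. The sketch below is a hypothetical scenario, not the exact test added by this commit; it assumes the fixtures this test class already uses (the cluster and hdfs fields, DFSTestUtil, SnapshotTestHelper, HdfsConstants, and the usual JUnit asserts). A file kept alive only by a snapshot must still look deleted in the current state after the NameNode reloads the FSImage:

    final Path dir = new Path("/dir");
    final Path file = new Path(dir, "file");
    hdfs.mkdirs(dir);
    DFSTestUtil.createFile(hdfs, file, 1024, (short) 1, 0L);
    SnapshotTestHelper.createSnapshot(hdfs, dir, "s1");
    hdfs.delete(file, true);          // the file now exists only in snapshot s1

    // Persist the namespace and restart so the NameNode loads from the FSImage.
    hdfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
    hdfs.saveNamespace();
    hdfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_LEAVE);
    cluster.restartNameNode();
    hdfs = cluster.getFileSystem();

    // Deleted in the current state, still visible through the snapshot path.
    assertFalse(hdfs.exists(file));
    assertTrue(hdfs.exists(SnapshotTestHelper.getSnapshotPath(dir, "s1", "file")));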