HDFS-13252. Code refactoring: Remove Diff.ListType.

Tsz-Wo Nicholas Sze 2018-03-09 15:25:41 -08:00
parent 4eeff62f69
commit ba0da2785d
13 changed files with 260 additions and 274 deletions
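In short, every call that picked a list via Diff.ListType now goes through an explicit created/deleted method. A minimal sketch of the before/after call shapes, using a ChildrenDiff variable named diff purely for illustration (the method names are the ones this patch adds or removes):

    List<INode> created = diff.getCreatedUnmodifiable();  // was diff.getList(ListType.CREATED)
    List<INode> deleted = diff.getDeletedUnmodifiable();  // was diff.getList(ListType.DELETED)
    INode d = diff.getDeleted(name);                      // was diff.search(ListType.DELETED, name)
    boolean gone = diff.containsDeleted(name);            // was diff.searchIndex(ListType.DELETED, name) >= 0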

View File

@@ -588,8 +588,7 @@ class FSDirRenameOp {
private INode srcChild;
private INode oldDstChild;
- RenameOperation(FSDirectory fsd, INodesInPath srcIIP, INodesInPath dstIIP)
-     throws QuotaExceededException {
+ RenameOperation(FSDirectory fsd, INodesInPath srcIIP, INodesInPath dstIIP) {
this.fsd = fsd;
this.srcIIP = srcIIP;
this.dstIIP = dstIIP;

View File

@@ -39,7 +39,6 @@ import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFea
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
import com.google.common.annotations.VisibleForTesting;
@@ -353,7 +352,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
// replace the instance in the created list of the diff list
DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature();
if (sf != null) {
- sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild);
+ sf.getDiffs().replaceCreatedChild(oldChild, newChild);
}
// update the inodeMap
@@ -746,8 +745,8 @@ public class INodeDirectory extends INodeWithAdditionalFields
final INode newChild) throws QuotaExceededException {
DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature();
assert sf != null : "Directory does not have snapshot feature";
- sf.getDiffs().removeChild(ListType.DELETED, oldChild);
- sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild);
+ sf.getDiffs().removeDeletedChild(oldChild);
+ sf.getDiffs().replaceCreatedChild(oldChild, newChild);
addChild(newChild, true, Snapshot.CURRENT_STATE_ID);
}
@@ -761,8 +760,7 @@ public class INodeDirectory extends INodeWithAdditionalFields
int latestSnapshotId) throws QuotaExceededException {
DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature();
assert sf != null : "Directory does not have snapshot feature";
- boolean removeDeletedChild = sf.getDiffs().removeChild(ListType.DELETED,
-     deletedChild);
+ boolean removeDeletedChild = sf.getDiffs().removeDeletedChild(deletedChild);
int sid = removeDeletedChild ? Snapshot.CURRENT_STATE_ID : latestSnapshotId;
final boolean added = addChild(deletedChild, true, sid);
// update quota usage if adding is successfully and the old child has not

View File

@@ -126,10 +126,6 @@ public abstract class INodeReference extends INode {
return referred;
}
- public final void setReferredINode(INode referred) {
-   this.referred = referred;
- }
@Override
public final boolean isReference() {
return true;

View File

@@ -44,7 +44,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Time;
@@ -396,7 +395,7 @@ public class DirectorySnapshottableFeature extends DirectoryWithSnapshotFeature
.getId());
for (INode child : children) {
final byte[] name = child.getLocalNameBytes();
- boolean toProcess = diff.searchIndex(ListType.DELETED, name) < 0;
+ boolean toProcess = !diff.containsDeleted(name);
if (!toProcess && child instanceof INodeReference.WithName) {
byte[][] renameTargetPath = findRenameTargetPath(
snapshotDir, (WithName) child,
@@ -476,7 +475,7 @@
}
iterate = true;
level = level + 1;
- boolean toProcess = diff.searchIndex(ListType.DELETED, name) < 0;
+ boolean toProcess = !diff.containsDeleted(name);
if (!toProcess && child instanceof INodeReference.WithName) {
byte[][] renameTargetPath = findRenameTargetPath(snapshotDir,
(WithName) child, Snapshot.getSnapshotId(later));

View File

@@ -17,38 +17,22 @@
*/
package org.apache.hadoop.hdfs.server.namenode.snapshot;
- import java.io.DataOutput;
- import java.io.IOException;
- import java.util.ArrayDeque;
- import java.util.Deque;
- import java.util.HashMap;
- import java.util.Iterator;
- import java.util.List;
- import java.util.Map;
+ import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
- import org.apache.hadoop.hdfs.server.namenode.AclStorage;
- import org.apache.hadoop.hdfs.server.namenode.ContentCounts;
- import org.apache.hadoop.hdfs.server.namenode.ContentSummaryComputationContext;
- import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
- import org.apache.hadoop.hdfs.server.namenode.INode;
- import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
- import org.apache.hadoop.hdfs.server.namenode.INodeDirectoryAttributes;
- import org.apache.hadoop.hdfs.server.namenode.INodeFile;
- import org.apache.hadoop.hdfs.server.namenode.INodeReference;
- import org.apache.hadoop.hdfs.server.namenode.QuotaCounts;
+ import org.apache.hadoop.hdfs.server.namenode.*;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotFSImageFormat.ReferenceMap;
import org.apache.hadoop.hdfs.util.Diff;
import org.apache.hadoop.hdfs.util.Diff.Container;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.Diff.UndoInfo;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
- import com.google.common.base.Preconditions;
import org.apache.hadoop.security.AccessControlException;
+ import java.io.DataOutput;
+ import java.io.IOException;
+ import java.util.*;
import static org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.NO_SNAPSHOT_ID;
/**
@@ -70,56 +54,43 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
}
/**
- * Replace the given child from the created/deleted list.
+ * Replace the given child from the created list.
* @return true if the child is replaced; false if the child is not found.
*/
- private boolean replace(final ListType type,
-     final INode oldChild, final INode newChild) {
-   final List<INode> list = getList(type);
+ private boolean replaceCreated(final INode oldChild, final INode newChild) {
+   final List<INode> list = getCreatedUnmodifiable();
final int i = search(list, oldChild.getLocalNameBytes());
if (i < 0 || list.get(i).getId() != oldChild.getId()) {
return false;
}
- final INode removed = list.set(i, newChild);
+ final INode removed = setCreated(i, newChild);
Preconditions.checkState(removed == oldChild);
return true;
}
- private boolean removeChild(ListType type, final INode child) {
-   final List<INode> list = getList(type);
-   final int i = searchIndex(type, child.getLocalNameBytes());
-   if (i >= 0 && list.get(i) == child) {
-     list.remove(i);
-     return true;
-   }
-   return false;
- }
/** clear the created list */
private void destroyCreatedList(INode.ReclaimContext reclaimContext,
final INodeDirectory currentINode) {
- final List<INode> createdList = getList(ListType.CREATED);
- for (INode c : createdList) {
+ for (INode c : getCreatedUnmodifiable()) {
c.destroyAndCollectBlocks(reclaimContext);
// c should be contained in the children list, remove it
currentINode.removeChild(c);
}
- createdList.clear();
+ clearCreated();
}
/** clear the deleted list */
private void destroyDeletedList(INode.ReclaimContext reclaimContext) {
- final List<INode> deletedList = getList(ListType.DELETED);
- for (INode d : deletedList) {
+ for (INode d : getDeletedUnmodifiable()) {
d.destroyAndCollectBlocks(reclaimContext);
}
- deletedList.clear();
+ clearDeleted();
}
/** Serialize {@link #created} */
private void writeCreated(DataOutput out) throws IOException {
- final List<INode> created = getList(ListType.CREATED);
+ final List<INode> created = getCreatedUnmodifiable();
out.writeInt(created.size());
for (INode node : created) {
// For INode in created list, we only need to record its local name
@@ -132,7 +103,7 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
/** Serialize {@link #deleted} */
private void writeDeleted(DataOutput out,
ReferenceMap referenceMap) throws IOException {
- final List<INode> deleted = getList(ListType.DELETED);
+ final List<INode> deleted = getDeletedUnmodifiable();
out.writeInt(deleted.size());
for (INode node : deleted) {
FSImageSerialization.saveINode2Image(node, out, true, referenceMap);
@@ -148,7 +119,7 @@
/** Get the list of INodeDirectory contained in the deleted list */
private void getDirsInDeleted(List<INodeDirectory> dirList) {
- for (INode node : getList(ListType.DELETED)) {
+ for (INode node : getDeletedUnmodifiable()) {
if (node.isDirectory()) {
dirList.add(node.asDirectory());
}
@@ -347,24 +318,24 @@
}
/** Replace the given child in the created/deleted list, if there is any. */
- public boolean replaceChild(final ListType type, final INode oldChild,
+ public boolean replaceCreatedChild(final INode oldChild,
final INode newChild) {
final DiffList<DirectoryDiff> diffList = asList();
for(int i = diffList.size() - 1; i >= 0; i--) {
final ChildrenDiff diff = diffList.get(i).diff;
- if (diff.replace(type, oldChild, newChild)) {
+ if (diff.replaceCreated(oldChild, newChild)) {
return true;
}
}
return false;
}
- /** Remove the given child in the created/deleted list, if there is any. */
- public boolean removeChild(final ListType type, final INode child) {
+ /** Remove the given child from the deleted list, if there is any. */
+ public boolean removeDeletedChild(final INode child) {
final DiffList<DirectoryDiff> diffList = asList();
for(int i = diffList.size() - 1; i >= 0; i--) {
final ChildrenDiff diff = diffList.get(i).diff;
- if (diff.removeChild(type, child)) {
+ if (diff.removeDeleted(child)) {
return true;
}
}
@@ -380,11 +351,9 @@
public int findSnapshotDeleted(final INode child) {
final DiffList<DirectoryDiff> diffList = asList();
for(int i = diffList.size() - 1; i >= 0; i--) {
- final ChildrenDiff diff = diffList.get(i).diff;
- final int d = diff.searchIndex(ListType.DELETED,
-     child.getLocalNameBytes());
- if (d >= 0 && diff.getList(ListType.DELETED).get(d) == child) {
-   return diffList.get(i).getSnapshotId();
+ final DirectoryDiff diff = diffList.get(i);
+ if (diff.getChildrenDiff().containsDeleted(child)) {
+   return diff.getSnapshotId();
}
}
return NO_SNAPSHOT_ID;
@@ -442,7 +411,7 @@
DirectoryDiffList diffList = sf.getDiffs();
DirectoryDiff priorDiff = diffList.getDiffById(prior);
if (priorDiff != null && priorDiff.getSnapshotId() == prior) {
- List<INode> dList = priorDiff.diff.getList(ListType.DELETED);
+ List<INode> dList = priorDiff.diff.getDeletedUnmodifiable();
excludedNodes = cloneDiffList(dList);
}
@@ -512,8 +481,8 @@
}
for (INode child : dir.getChildrenList(prior)) {
- if (priorChildrenDiff != null && priorChildrenDiff.search(
-     ListType.DELETED, child.getLocalNameBytes()) != null) {
+ if (priorChildrenDiff != null && priorChildrenDiff.getDeleted(
+     child.getLocalNameBytes()) != null) {
continue;
}
queue.addLast(child);
@@ -558,13 +527,15 @@
boolean setModTime, int latestSnapshotId) throws QuotaExceededException {
ChildrenDiff diff = diffs.checkAndAddLatestSnapshotDiff(latestSnapshotId,
parent).diff;
- int undoInfo = diff.create(inode);
- final boolean added = parent.addChild(inode, setModTime,
-     Snapshot.CURRENT_STATE_ID);
- if (!added) {
-   diff.undoCreate(inode, undoInfo);
- }
+ final int undoInfo = diff.create(inode);
+ boolean added = false;
+ try {
+   added = parent.addChild(inode, setModTime, Snapshot.CURRENT_STATE_ID);
+ } finally {
+   if (!added) {
+     diff.undoCreate(inode, undoInfo);
+   }
+ }
return added;
}
@@ -587,13 +558,15 @@
// any snapshot.
ChildrenDiff diff = diffs.checkAndAddLatestSnapshotDiff(latestSnapshotId,
parent).diff;
- UndoInfo<INode> undoInfo = diff.delete(child);
- final boolean removed = parent.removeChild(child);
- if (!removed && undoInfo != null) {
-   // remove failed, undo
-   diff.undoDelete(child, undoInfo);
- }
+ final UndoInfo<INode> undoInfo = diff.delete(child);
+ boolean removed = false;
+ try {
+   removed = parent.removeChild(child);
+ } finally {
+   if (!removed) {
+     diff.undoDelete(child, undoInfo);
+   }
+ }
return removed;
}
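The two hunks above do more than rename methods: the tentative diff entry is now rolled back in a finally block, so an addChild/removeChild that fails or throws no longer leaves a stray record in the created/deleted list. The shape of the pattern, with names taken from the hunk (a condensed sketch, not the full method):

    final int undoInfo = diff.create(inode);   // speculatively record in the created list
    boolean added = false;
    try {
      added = parent.addChild(inode, setModTime, Snapshot.CURRENT_STATE_ID);
    } finally {
      if (!added) {
        diff.undoCreate(inode, undoInfo);      // roll back on failure or exception
      }
    }
    return added;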
@@ -648,7 +621,7 @@
BlockStoragePolicySuite bsps, byte storagePolicyId) {
final QuotaCounts counts = new QuotaCounts.Builder().build();
for(DirectoryDiff d : diffs) {
- for(INode deleted : d.getChildrenDiff().getList(ListType.DELETED)) {
+ for(INode deleted : d.getChildrenDiff().getDeletedUnmodifiable()) {
final byte childPolicyId = deleted.getStoragePolicyIDForQuota(
storagePolicyId);
counts.add(deleted.computeQuotaUsage(bsps, childPolicyId, false,
@@ -664,7 +637,7 @@
ContentSummaryComputationContext summary =
new ContentSummaryComputationContext(bsps);
for(DirectoryDiff d : diffs) {
- for(INode deleted : d.getChildrenDiff().getList(ListType.DELETED)) {
+ for(INode deleted : d.getChildrenDiff().getDeletedUnmodifiable()) {
deleted.computeContentSummary(Snapshot.CURRENT_STATE_ID, summary);
}
}
@@ -747,10 +720,8 @@
if (prior != NO_SNAPSHOT_ID) {
DirectoryDiff priorDiff = this.getDiffs().getDiffById(prior);
if (priorDiff != null && priorDiff.getSnapshotId() == prior) {
- List<INode> cList = priorDiff.diff.getList(ListType.CREATED);
- List<INode> dList = priorDiff.diff.getList(ListType.DELETED);
- priorCreated = cloneDiffList(cList);
- priorDeleted = cloneDiffList(dList);
+ priorCreated = cloneDiffList(priorDiff.diff.getCreatedUnmodifiable());
+ priorDeleted = cloneDiffList(priorDiff.diff.getDeletedUnmodifiable());
}
}
@@ -770,8 +741,7 @@
// cleanSubtreeRecursively call.
if (priorCreated != null) {
// we only check the node originally in prior's created list
- for (INode cNode : priorDiff.getChildrenDiff().getList(
-     ListType.CREATED)) {
+ for (INode cNode : priorDiff.diff.getCreatedUnmodifiable()) {
if (priorCreated.containsKey(cNode)) {
cNode.cleanSubtree(reclaimContext, snapshot, NO_SNAPSHOT_ID);
}
@@ -786,8 +756,7 @@
// For files moved from posterior's deleted list, we also need to
// delete its snapshot copy associated with the posterior snapshot.
- for (INode dNode : priorDiff.getChildrenDiff().getList(
-     ListType.DELETED)) {
+ for (INode dNode : priorDiff.diff.getDeletedUnmodifiable()) {
if (priorDeleted == null || !priorDeleted.containsKey(dNode)) {
cleanDeletedINode(reclaimContext, dNode, snapshot, prior);
}

View File

@@ -78,7 +78,6 @@ import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeat
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.Root;
import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.EnumCounters;
import com.google.common.base.Preconditions;
@@ -581,10 +580,9 @@ public class FSImageFormatPBSnapshot {
buildINodeDirectory(copy, parent.getSaverContext()));
}
// process created list and deleted list
- List<INode> created = diff.getChildrenDiff()
-     .getList(ListType.CREATED);
+ List<INode> created = diff.getChildrenDiff().getCreatedUnmodifiable();
db.setCreatedListSize(created.size());
- List<INode> deleted = diff.getChildrenDiff().getList(ListType.DELETED);
+ List<INode> deleted = diff.getChildrenDiff().getDeletedUnmodifiable();
for (INode d : deleted) {
if (d.isReference()) {
refList.add(d.asReference());

View File

@@ -32,7 +32,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.INodeReference;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import com.google.common.base.Preconditions;
import com.google.common.primitives.SignedBytes;
@@ -141,7 +140,7 @@ class SnapshotDiffInfo {
dirDiffMap.put(dir, diff);
diffMap.put(dir, relativePath);
// detect rename
- for (INode created : diff.getList(ListType.CREATED)) {
+ for (INode created : diff.getCreatedUnmodifiable()) {
if (created.isReference()) {
RenameEntry entry = getEntry(created.getId());
if (entry.getTargetPath() == null) {
@@ -149,7 +148,7 @@
}
}
}
- for (INode deleted : diff.getList(ListType.DELETED)) {
+ for (INode deleted : diff.getDeletedUnmodifiable()) {
if (deleted instanceof INodeReference.WithName) {
RenameEntry entry = getEntry(deleted.getId());
entry.setSource(deleted, relativePath);
@@ -221,11 +220,9 @@
private List<DiffReportEntry> generateReport(ChildrenDiff dirDiff,
byte[][] parentPath, boolean fromEarlier, Map<Long, RenameEntry> renameMap) {
List<DiffReportEntry> list = new ChunkedArrayList<>();
- List<INode> created = dirDiff.getList(ListType.CREATED);
- List<INode> deleted = dirDiff.getList(ListType.DELETED);
byte[][] fullPath = new byte[parentPath.length + 1][];
System.arraycopy(parentPath, 0, fullPath, 0, parentPath.length);
- for (INode cnode : created) {
+ for (INode cnode : dirDiff.getCreatedUnmodifiable()) {
RenameEntry entry = renameMap.get(cnode.getId());
if (entry == null || !entry.isRename()) {
fullPath[fullPath.length - 1] = cnode.getLocalNameBytes();
@@ -233,7 +230,7 @@
: DiffType.DELETE, fullPath));
}
}
- for (INode dnode : deleted) {
+ for (INode dnode : dirDiff.getDeletedUnmodifiable()) {
RenameEntry entry = renameMap.get(dnode.getId());
if (entry != null && entry.isRename()) {
list.add(new DiffReportEntry(DiffType.RENAME,

View File

@@ -28,7 +28,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.INodeReference;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import com.google.common.base.Preconditions;
import org.apache.hadoop.util.ChunkedArrayList;
@@ -96,10 +95,10 @@ class SnapshotDiffListingInfo {
}
}
- if (lastIndex == -1 || lastIndex < diff.getList(ListType.CREATED).size()) {
+ final List<INode> clist = diff.getCreatedUnmodifiable();
+ if (lastIndex == -1 || lastIndex < clist.size()) {
ListIterator<INode> iterator = lastIndex != -1 ?
- diff.getList(ListType.CREATED).listIterator(lastIndex)
- : diff.getList(ListType.CREATED).listIterator();
+ clist.listIterator(lastIndex): clist.listIterator();
while (iterator.hasNext()) {
if (getTotalEntries() < maxEntries) {
INode created = iterator.next();
@@ -115,11 +114,11 @@
setLastIndex(-1);
}
- if (lastIndex == -1 || lastIndex >= diff.getList(ListType.CREATED).size()) {
- int size = diff.getList(ListType.DELETED).size();
+ if (lastIndex == -1 || lastIndex >= clist.size()) {
+ final List<INode> dlist = diff.getDeletedUnmodifiable();
+ int size = dlist.size();
ListIterator<INode> iterator = lastIndex != -1 ?
- diff.getList(ListType.DELETED).listIterator(lastIndex - size)
- : diff.getList(ListType.DELETED).listIterator();
+ dlist.listIterator(lastIndex - size): dlist.listIterator();
while (iterator.hasNext()) {
if (getTotalEntries() < maxEntries) {
final INode d = iterator.next();

View File

@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeReference;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
import org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
import com.google.common.base.Preconditions;
@@ -145,8 +144,7 @@ public class SnapshotFSImageFormat {
// the INode in the created list should be a reference to another INode
// in posterior SnapshotDiffs or one of the current children
for (DirectoryDiff postDiff : parent.getDiffs()) {
- final INode d = postDiff.getChildrenDiff().search(ListType.DELETED,
-     createdNodeName);
+ final INode d = postDiff.getChildrenDiff().getDeleted(createdNodeName);
if (d != null) {
return d;
} // else go to the next SnapshotDiff

View File

@@ -73,10 +73,6 @@ import com.google.common.base.Preconditions;
* @param <E> The element type, which must implement {@link Element} interface.
*/
public class Diff<K, E extends Diff.Element<K>> {
- public enum ListType {
-   CREATED, DELETED
- }
/** An interface for the elements in a {@link Diff}. */
public static interface Element<K> extends Comparable<K> {
/** @return the key of this object. */
@@ -156,24 +152,71 @@
this.deleted = deleted;
}
- /** @return the created list, which is never null. */
- public List<E> getList(final ListType type) {
-   final List<E> list = type == ListType.CREATED? created: deleted;
-   return list == null? Collections.<E>emptyList(): list;
- }
+ public List<E> getCreatedUnmodifiable() {
+   return created != null? Collections.unmodifiableList(created)
+       : Collections.emptyList();
+ }
- public int searchIndex(final ListType type, final K name) {
-   return search(getList(type), name);
- }
+ public E setCreated(int index, E element) {
+   final E old = created.set(index, element);
+   if (old.compareTo(element.getKey()) != 0) {
+     throw new AssertionError("Element mismatched: element=" + element
+         + " but old=" + old);
+   }
+   return old;
+ }
+ public void clearCreated() {
+   if (created != null) {
+     created.clear();
+   }
+ }
+ public List<E> getDeletedUnmodifiable() {
+   return deleted != null? Collections.unmodifiableList(deleted)
+       : Collections.emptyList();
+ }
+ public boolean containsDeleted(final K key) {
+   if (deleted != null) {
+     return search(deleted, key) >= 0;
+   }
+   return false;
+ }
+ public boolean containsDeleted(final E element) {
+   return getDeleted(element.getKey()) == element;
+ }
/**
* @return null if the element is not found;
- * otherwise, return the element in the created/deleted list.
+ * otherwise, return the element in the deleted list.
*/
- public E search(final ListType type, final K name) {
-   final List<E> list = getList(type);
-   final int c = search(list, name);
-   return c < 0 ? null : list.get(c);
- }
+ public E getDeleted(final K key) {
+   if (deleted != null) {
+     final int c = search(deleted, key);
+     if (c >= 0) {
+       return deleted.get(c);
+     }
+   }
+   return null;
+ }
+ public boolean removeDeleted(final E element) {
+   if (deleted != null) {
+     final int i = search(deleted, element.getKey());
+     if (i >= 0 && deleted.get(i) == element) {
+       deleted.remove(i);
+       return true;
+     }
+   }
+   return false;
+ }
+ public void clearDeleted() {
+   if (deleted != null) {
+     deleted.clear();
+   }
+ }
/** @return true if no changes contained in the diff */
@@ -183,26 +226,36 @@
}
/**
- * Insert the given element to the created/deleted list.
+ * Add the given element to the created list,
+ * provided the element does not exist, i.e. i < 0.
+ *
* @param i the insertion point defined
* in {@link Collections#binarySearch(List, Object)}
+ * @throws AssertionError if i >= 0.
*/
- private void insert(final ListType type, final E element, final int i) {
-   List<E> list = type == ListType.CREATED? created: deleted;
+ private void addCreated(final E element, final int i) {
if (i >= 0) {
throw new AssertionError("Element already exists: element=" + element
- + ", " + type + "=" + list);
+ + ", created=" + created);
}
- if (list == null) {
-   list = new ArrayList<E>(DEFAULT_ARRAY_INITIAL_CAPACITY);
-   if (type == ListType.CREATED) {
-     created = list;
-   } else if (type == ListType.DELETED){
-     deleted = list;
-   }
- }
- list.add(-i - 1, element);
+ if (created == null) {
+   created = new ArrayList<>(DEFAULT_ARRAY_INITIAL_CAPACITY);
+ }
+ created.add(-i - 1, element);
}
+ /** Similar to {@link #addCreated(Element, int)} but for the deleted list. */
+ private void addDeleted(final E element, final int i) {
+   if (i >= 0) {
+     throw new AssertionError("Element already exists: element=" + element
+         + ", deleted=" + deleted);
+   }
+   if (deleted == null) {
+     deleted = new ArrayList<>(DEFAULT_ARRAY_INITIAL_CAPACITY);
+   }
+   deleted.add(-i - 1, element);
+ }
/**
* Create an element in current state.
@@ -210,7 +263,7 @@
*/
public int create(final E element) {
final int c = search(created, element.getKey());
- insert(ListType.CREATED, element, c);
+ addCreated(element, c);
return c;
}
@@ -236,7 +289,7 @@
} else {
// not in c-list, it must be in previous
d = search(deleted, element.getKey());
- insert(ListType.DELETED, element, d);
+ addDeleted(element, d);
}
return new UndoInfo<E>(c, previous, d);
}
@@ -277,8 +330,8 @@
d = search(deleted, oldElement.getKey());
if (d < 0) {
// Case 2.3: neither in c-list nor d-list
- insert(ListType.CREATED, newElement, c);
- insert(ListType.DELETED, oldElement, d);
+ addCreated(newElement, c);
+ addDeleted(oldElement, d);
}
}
return new UndoInfo<E>(c, previous, d);
@@ -348,7 +401,7 @@
*/
public List<E> apply2Previous(final List<E> previous) {
return apply2Previous(previous,
- getList(ListType.CREATED), getList(ListType.DELETED));
+ getCreatedUnmodifiable(), getDeletedUnmodifiable());
}
private static <K, E extends Diff.Element<K>> List<E> apply2Previous(
@@ -408,7 +461,7 @@
*/
public List<E> apply2Current(final List<E> current) {
return apply2Previous(current,
- getList(ListType.DELETED), getList(ListType.CREATED));
+ getDeletedUnmodifiable(), getCreatedUnmodifiable());
}
/**
@@ -443,8 +496,10 @@
*/
public void combinePosterior(final Diff<K, E> posterior,
final Processor<E> deletedProcesser) {
- final Iterator<E> createdIterator = posterior.getList(ListType.CREATED).iterator();
- final Iterator<E> deletedIterator = posterior.getList(ListType.DELETED).iterator();
+ final Iterator<E> createdIterator
+     = posterior.getCreatedUnmodifiable().iterator();
+ final Iterator<E> deletedIterator
+     = posterior.getDeletedUnmodifiable().iterator();
E c = createdIterator.hasNext()? createdIterator.next(): null;
E d = deletedIterator.hasNext()? deletedIterator.next(): null;
@@ -479,7 +534,7 @@
@Override
public String toString() {
return getClass().getSimpleName()
- + "{created=" + getList(ListType.CREATED)
- + ", deleted=" + getList(ListType.DELETED) + "}";
+ + "{created=" + getCreatedUnmodifiable()
+ + ", deleted=" + getDeletedUnmodifiable() + "}";
}
}
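Net effect of the Diff changes: callers now receive read-only views of the created/deleted lists, and every mutation goes through a named method (create/delete, addCreated/addDeleted, setCreated, removeDeleted, clearCreated/clearDeleted). A small illustrative sketch, assuming a ChildrenDiff-style Diff<byte[], INode> named diff (variable names are hypothetical):

    int insertionPoint = diff.create(newNode);                // record newNode in the created list
    List<INode> createdView = diff.getCreatedUnmodifiable();  // unmodifiable view, or an empty list
    boolean inDeleted = diff.containsDeleted(newNode.getLocalNameBytes());
    INode d = diff.getDeleted(someName);                      // null when the key is not in the deleted list
    diff.clearCreated();                                       // mutation only via the explicit methods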

View File

@@ -17,23 +17,6 @@
*/
package org.apache.hadoop.hdfs.server.namenode.snapshot;
- import static org.junit.Assert.assertEquals;
- import static org.junit.Assert.assertTrue;
- import java.io.BufferedReader;
- import java.io.File;
- import java.io.FileReader;
- import java.io.FileWriter;
- import java.io.IOException;
- import java.io.PrintWriter;
- import java.util.ArrayList;
- import java.util.HashMap;
- import java.util.List;
- import java.util.Map;
- import java.util.Random;
- import org.apache.commons.logging.Log;
- import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,47 +29,71 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeature;
- import org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceStorage;
- import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
- import org.apache.hadoop.hdfs.server.datanode.DataNode;
- import org.apache.hadoop.hdfs.server.datanode.DirectoryScanner;
- import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
- import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
- import org.apache.hadoop.hdfs.server.namenode.INode;
- import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
- import org.apache.hadoop.hdfs.server.namenode.INodeFile;
- import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
- import org.apache.hadoop.hdfs.server.namenode.NameNode;
+ import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+ import org.apache.hadoop.hdfs.server.datanode.*;
+ import org.apache.hadoop.hdfs.server.datanode.checker.DatasetVolumeChecker;
+ import org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker;
+ import org.apache.hadoop.hdfs.server.namenode.*;
+ import org.apache.hadoop.hdfs.server.namenode.top.metrics.TopMetrics;
+ import org.apache.hadoop.http.HttpRequestLog;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.ipc.ProtobufRpcEngine.Server;
import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
+ import org.apache.hadoop.util.GSet;
import org.junit.Assert;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
+ import java.io.*;
+ import java.util.*;
+ import static org.junit.Assert.assertEquals;
+ import static org.junit.Assert.assertTrue;
/**
* Helper for writing snapshot related tests
*/
public class SnapshotTestHelper {
- public static final Log LOG = LogFactory.getLog(SnapshotTestHelper.class);
+ static final Logger LOG = LoggerFactory.getLogger(SnapshotTestHelper.class);
/** Disable the logs that are not very useful for snapshot related tests. */
public static void disableLogs() {
final String[] lognames = {
+ "org.apache.hadoop.hdfs.server.common.Util",
+ "org.apache.hadoop.hdfs.server.blockmanagement.BlockReportLeaseManager",
+ "org.apache.hadoop.hdfs.server.namenode.FileJournalManager",
+ "org.apache.hadoop.hdfs.server.namenode.NNStorageRetentionManager",
+ "org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf",
+ "org.apache.hadoop.hdfs.server.namenode.FSEditLog",
"org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceScanner",
+ "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice",
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl",
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetAsyncDiskService",
+ "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl" +
+     ".RamDiskAsyncLazyPersistService",
};
for(String n : lognames) {
- GenericTestUtils.disableLog(LogFactory.getLog(n));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(n));
}
- GenericTestUtils.disableLog(LogFactory.getLog(UserGroupInformation.class));
- GenericTestUtils.disableLog(LogFactory.getLog(BlockManager.class));
- GenericTestUtils.disableLog(LogFactory.getLog(FSNamesystem.class));
- GenericTestUtils.disableLog(LogFactory.getLog(DirectoryScanner.class));
- GenericTestUtils.disableLog(LogFactory.getLog(MetricsSystemImpl.class));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(
+     UserGroupInformation.class));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(BlockManager.class));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(FSNamesystem.class));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(
+     DirectoryScanner.class));
+ GenericTestUtils.disableLog(LoggerFactory.getLogger(
+     MetricsSystemImpl.class));
+ GenericTestUtils.disableLog(DatasetVolumeChecker.LOG);
+ GenericTestUtils.disableLog(DatanodeDescriptor.LOG);
+ GenericTestUtils.disableLog(GSet.LOG);
+ GenericTestUtils.disableLog(TopMetrics.LOG);
+ GenericTestUtils.disableLog(HttpRequestLog.LOG);
+ GenericTestUtils.disableLog(ThrottledAsyncChecker.LOG);
+ GenericTestUtils.disableLog(VolumeScanner.LOG);
GenericTestUtils.disableLog(BlockScanner.LOG);
GenericTestUtils.disableLog(HttpServer2.LOG);
GenericTestUtils.disableLog(DataNode.LOG);

View File

@@ -17,22 +17,6 @@
*/
package org.apache.hadoop.hdfs.server.namenode.snapshot;
- import static org.junit.Assert.assertEquals;
- import static org.junit.Assert.assertFalse;
- import static org.junit.Assert.assertSame;
- import static org.junit.Assert.assertTrue;
- import static org.junit.Assert.fail;
- import static org.mockito.Matchers.anyBoolean;
- import static org.mockito.Matchers.anyObject;
- import static org.mockito.Mockito.doReturn;
- import static org.mockito.Mockito.spy;
- import java.io.File;
- import java.io.IOException;
- import java.util.EnumSet;
- import java.util.List;
- import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -41,12 +25,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
- import org.apache.hadoop.hdfs.DFSConfigKeys;
- import org.apache.hadoop.hdfs.DFSOutputStream;
- import org.apache.hadoop.hdfs.DFSTestUtil;
- import org.apache.hadoop.hdfs.DFSUtil;
- import org.apache.hadoop.hdfs.DistributedFileSystem;
- import org.apache.hadoop.hdfs.MiniDFSCluster;
+ import org.apache.hadoop.hdfs.*;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -55,18 +34,10 @@ import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
- import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
- import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
- import org.apache.hadoop.hdfs.server.namenode.INode;
- import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
- import org.apache.hadoop.hdfs.server.namenode.INodeFile;
- import org.apache.hadoop.hdfs.server.namenode.INodeReference;
+ import org.apache.hadoop.hdfs.server.namenode.*;
import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
- import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
- import org.apache.hadoop.hdfs.server.namenode.QuotaCounts;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.apache.hadoop.hdfs.util.ReadOnlyList;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
@@ -76,6 +47,18 @@ import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
+ import java.io.File;
+ import java.io.IOException;
+ import java.util.EnumSet;
+ import java.util.List;
+ import java.util.Random;
+ import static org.junit.Assert.*;
+ import static org.mockito.Matchers.anyBoolean;
+ import static org.mockito.Matchers.anyObject;
+ import static org.mockito.Mockito.doReturn;
+ import static org.mockito.Mockito.spy;
/** Testing rename with snapshots. */
public class TestRenameWithSnapshots {
static {
@@ -104,6 +87,10 @@ public class TestRenameWithSnapshots {
static private final String snap1 = "snap1";
static private final String snap2 = "snap2";
+ static void assertSizes(int createdSize, int deletedSize, ChildrenDiff diff) {
+   assertEquals(createdSize, diff.getCreatedUnmodifiable().size());
+   assertEquals(deletedSize, diff.getDeletedUnmodifiable().size());
+ }
@Before
public void setUp() throws Exception {
@@ -1301,8 +1288,7 @@
// after the undo of rename, both the created and deleted list of sdir1
// should be empty
ChildrenDiff childrenDiff = dir1Diffs.get(0).getChildrenDiff();
- assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
- assertEquals(0, childrenDiff.getList(ListType.CREATED).size());
+ assertSizes(0, 0, childrenDiff);
INode fooNode = fsdir.getINode4Write(foo.toString());
assertTrue(fooNode.isDirectory() && fooNode.asDirectory().isWithSnapshot());
@@ -1372,12 +1358,11 @@
// after the undo of rename, the created list of sdir1 should contain
// 1 element
ChildrenDiff childrenDiff = dir1Diffs.get(0).getChildrenDiff();
- assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
- assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
+ assertSizes(1, 0, childrenDiff);
INode fooNode = fsdir.getINode4Write(foo.toString());
assertTrue(fooNode instanceof INodeDirectory);
- assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
+ assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
final Path foo_s1 = SnapshotTestHelper.getSnapshotPath(sdir1, "s1", "foo");
assertFalse(hdfs.exists(foo_s1));
@@ -1438,13 +1423,12 @@
assertEquals(1, dir2Diffs.size());
assertEquals(s2.getId(), dir2Diffs.get(0).getSnapshotId());
ChildrenDiff childrenDiff = dir2Diffs.get(0).getChildrenDiff();
- assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
- assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
+ assertSizes(1, 0, childrenDiff);
final Path foo_s2 = SnapshotTestHelper.getSnapshotPath(sdir2, "s2", "foo2");
assertFalse(hdfs.exists(foo_s2));
INode fooNode = fsdir.getINode4Write(foo_dir2.toString());
- assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
+ assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
assertTrue(fooNode instanceof INodeReference.DstReference);
DiffList<DirectoryDiff> fooDiffs =
fooNode.asDirectory().getDiffs().asList();
@@ -1468,13 +1452,11 @@
assertEquals(s3.getId(), dir2Diffs.get(1).getSnapshotId());
childrenDiff = dir2Diffs.get(0).getChildrenDiff();
- assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
- assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
- assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
+ assertSizes(1, 0, childrenDiff);
+ assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
childrenDiff = dir2Diffs.get(1).getChildrenDiff();
- assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
- assertEquals(0, childrenDiff.getList(ListType.CREATED).size());
+ assertSizes(0, 0, childrenDiff);
final Path foo_s3 = SnapshotTestHelper.getSnapshotPath(sdir2, "s3", "foo2");
assertFalse(hdfs.exists(foo_s2));
@@ -1600,8 +1582,7 @@
.getDiffs().asList();
assertEquals(1, diffList.size());
DirectoryDiff diff = diffList.get(0);
- assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
- assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+ assertSizes(0, 0, diff.getChildrenDiff());
// check dir2
INodeDirectory dir2Node = fsdir2.getINode4Write(dir2.toString()).asDirectory();
@@ -1618,8 +1599,7 @@
diffList = dir2Node.getDiffs().asList();
assertEquals(1, diffList.size());
diff = diffList.get(0);
- assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
- assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+ assertSizes(0, 0, diff.getChildrenDiff());
}
/**
@@ -1674,8 +1654,7 @@
.getDiffs().asList();
assertEquals(1, diffList.size());
DirectoryDiff diff = diffList.get(0);
- assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
- assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+ assertSizes(0, 0, diff.getChildrenDiff());
// check dir2
INodeDirectory dir2Node = fsdir2.getINode4Write(dir2.toString()).asDirectory();
@@ -1696,8 +1675,7 @@
diffList = ( dir2Node).getDiffs().asList();
assertEquals(1, diffList.size());
diff = diffList.get(0);
- assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
- assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+ assertSizes(0, 0, diff.getChildrenDiff());
}
/**
@@ -1737,8 +1715,7 @@
Snapshot s1 = rootNode.getSnapshot(DFSUtil.string2Bytes(snap1));
assertEquals(s1.getId(), diff.getSnapshotId());
// after undo, the diff should be empty
- assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
- assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
+ assertSizes(0, 0, diff.getChildrenDiff());
// bar was converted to filewithsnapshot while renaming
INodeFile barNode = fsdir.getINode4Write(bar.toString()).asFile();
@@ -1989,8 +1966,7 @@
Snapshot s1 = dir1Node.getSnapshot(DFSUtil.string2Bytes("s1"));
assertEquals(s1.getId(), diffList.get(0).getSnapshotId());
ChildrenDiff diff = diffList.get(0).getChildrenDiff();
- assertEquals(0, diff.getList(ListType.CREATED).size());
- assertEquals(0, diff.getList(ListType.DELETED).size());
+ assertSizes(0, 0, diff);
restartClusterAndCheckImage(true);
}
@@ -2062,8 +2038,7 @@
assertEquals(s1.getId(), diffList.get(0).getSnapshotId());
ChildrenDiff diff = diffList.get(0).getChildrenDiff();
// bar2 and bar3 in the created list
- assertEquals(2, diff.getList(ListType.CREATED).size());
- assertEquals(0, diff.getList(ListType.DELETED).size());
+ assertSizes(2, 0, diff);
final INode fooRef2 = fsdir.getINode4Write(foo.toString());
assertTrue(fooRef2 instanceof INodeReference.DstReference);
@@ -2235,13 +2210,9 @@
.asDirectory();
DiffList<DirectoryDiff> dir1DiffList = dir1Node.getDiffs().asList();
assertEquals(1, dir1DiffList.size());
- List<INode> dList = dir1DiffList.get(0).getChildrenDiff()
-     .getList(ListType.DELETED);
- assertTrue(dList.isEmpty());
- List<INode> cList = dir1DiffList.get(0).getChildrenDiff()
-     .getList(ListType.CREATED);
- assertEquals(1, cList.size());
- INode cNode = cList.get(0);
+ final ChildrenDiff childrenDiff = dir1DiffList.get(0).getChildrenDiff();
+ assertSizes(1, 0, childrenDiff);
+ INode cNode = childrenDiff.getCreatedUnmodifiable().get(0);
INode fooNode = fsdir.getINode4Write(newfoo.toString());
assertSame(cNode, fooNode);
@@ -2259,7 +2230,7 @@
Snapshot s0 = testNode.getSnapshot(DFSUtil.string2Bytes("s0"));
assertEquals(s0.getId(), diff.getSnapshotId());
// and file should be stored in the deleted list of this snapshot diff
- assertEquals("file", diff.getChildrenDiff().getList(ListType.DELETED)
+ assertEquals("file", diff.getChildrenDiff().getDeletedUnmodifiable()
.get(0).getLocalName());
// check dir2: a WithName instance for foo should be in the deleted list
@@ -2269,7 +2240,8 @@
DiffList<DirectoryDiff> dir2DiffList = dir2Node.getDiffs().asList();
// dir2Node should contain 1 snapshot diffs for s2
assertEquals(1, dir2DiffList.size());
- dList = dir2DiffList.get(0).getChildrenDiff().getList(ListType.DELETED);
+ final List<INode> dList = dir2DiffList.get(0).getChildrenDiff()
+     .getDeletedUnmodifiable();
assertEquals(1, dList.size());
final Path foo_s2 = SnapshotTestHelper.getSnapshotPath(dir2, "s2",
foo.getName());
@@ -2323,8 +2295,7 @@
DiffList<DirectoryDiff> diffList = barNode.getDiffs().asList();
assertEquals(1, diffList.size());
DirectoryDiff diff = diffList.get(0);
- assertEquals(0, diff.getChildrenDiff().getList(ListType.DELETED).size());
- assertEquals(0, diff.getChildrenDiff().getList(ListType.CREATED).size());
+ assertSizes(0, 0, diff.getChildrenDiff());
}
/**
/** /**

View File

@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.INode;
import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
- import org.apache.hadoop.hdfs.util.Diff.ListType;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
@@ -153,8 +152,9 @@ public class TestSetQuotaWithSnapshot {
subNode.asDirectory().getDiffs().asList();
assertEquals(1, diffList.size());
Snapshot s2 = dirNode.getSnapshot(DFSUtil.string2Bytes("s2"));
- assertEquals(s2.getId(), diffList.get(0).getSnapshotId());
- List<INode> createdList = diffList.get(0).getChildrenDiff().getList(ListType.CREATED);
+ final DirectoryDiff diff = diffList.get(0);
+ assertEquals(s2.getId(), diff.getSnapshotId());
+ List<INode> createdList = diff.getChildrenDiff().getCreatedUnmodifiable();
assertEquals(1, createdList.size());
assertSame(fsdir.getINode4Write(file.toString()), createdList.get(0));
}