diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
index 2a7b10effba..96dceb70598 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
@@ -335,3 +335,6 @@ Branch-2802 Snapshot (Unreleased)
 
   HDFS-4791. Update and fix deletion of reference inode. (Jing Zhao via
   szetszwo)
+
+  HDFS-4798. Update computeContentSummary() for the reference nodes in
+  snapshots. (szetszwo)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/Content.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/Content.java
index bb700051ccc..5dfa3e952f5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/Content.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/Content.java
@@ -42,16 +42,20 @@
 
   /** Content counts. */
   public static class Counts extends EnumCounters<Content> {
+    public static Counts newInstance() {
+      return new Counts();
+    }
+
     private Counts() {
       super(Content.values());
     }
   }
 
-  static final EnumCounters.Factory<Content, Counts> FACTORY
+  private static final EnumCounters.Factory<Content, Counts> FACTORY
       = new EnumCounters.Factory<Content, Counts>() {
     @Override
     public Counts newInstance() {
-      return new Counts();
+      return Counts.newInstance();
    }
  };
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index 541f591113a..2cc2a9931e1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot;
@@ -369,23 +368,14 @@ public abstract class INode implements Diff.Element<byte[]> {
 
   /** Compute {@link ContentSummary}. */
   public final ContentSummary computeContentSummary() {
-    final Content.Counts current = computeContentSummary(
-        new Content.CountsMap()).getCounts(Key.CURRENT);
-    return new ContentSummary(current.get(Content.LENGTH),
-        current.get(Content.FILE) + current.get(Content.SYMLINK),
-        current.get(Content.DIRECTORY), getNsQuota(),
-        current.get(Content.DISKSPACE), getDsQuota());
+    final Content.Counts counts = computeContentSummary(
+        Content.Counts.newInstance());
+    return new ContentSummary(counts.get(Content.LENGTH),
+        counts.get(Content.FILE) + counts.get(Content.SYMLINK),
+        counts.get(Content.DIRECTORY), getNsQuota(),
+        counts.get(Content.DISKSPACE), getDsQuota());
   }
 
-  /**
-   * Count subtree content summary with a {@link Content.CountsMap}.
-   *
-   * @param countsMap The subtree counts for returning.
-   * @return The same objects as the counts parameter.
-   */
-  public abstract Content.CountsMap computeContentSummary(
-      Content.CountsMap countsMap);
-
   /**
    * Count subtree content summary with a {@link Content.Counts}.
    *
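Note on the INode.java hunk above: computeContentSummary() no longer threads a Content.CountsMap (with separate CURRENT and SNAPSHOT keys) through the subtree; a single Content.Counts object is passed down and mutated in place. The private-constructor-plus-static-factory shape of Counts is easy to see in isolation. The following standalone sketch is illustrative only; it assumes a simplified stand-in for org.apache.hadoop.hdfs.util.EnumCounters and is not the real class:

import java.util.EnumMap;

// Hypothetical, simplified stand-in for the Content/Counts pair in the patch.
enum Content { LENGTH, FILE, SYMLINK, DIRECTORY, DISKSPACE, SNAPSHOT, SNAPSHOTTABLE_DIRECTORY }

class Counts {
  private final EnumMap<Content, Long> counters =
      new EnumMap<Content, Long>(Content.class);

  /** Same pattern as the patch: public static factory, private constructor. */
  public static Counts newInstance() {
    return new Counts();
  }

  private Counts() {
    for (Content c : Content.values()) {
      counters.put(c, 0L); // start every counter at zero
    }
  }

  public void add(Content key, long delta) {
    counters.put(key, counters.get(key) + delta);
  }

  public long get(Content key) {
    return counters.get(key);
  }
}

Making the constructor private and exposing Counts.newInstance() leaves FACTORY as the only other creation path, which is why the Content.java hunk can also make FACTORY private.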
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index db7a27f4b6a..78aa46b30fd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectorySnapshottable;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectoryWithSnapshot;
@@ -473,16 +472,6 @@ public class INodeDirectory extends INodeWithAdditionalFields {
     return counts;
   }
 
-  @Override
-  public Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    for (INode child : getChildrenList(null)) {
-      child.computeContentSummary(countsMap);
-    }
-    countsMap.getCounts(Key.CURRENT).add(Content.DIRECTORY, 1);
-    return countsMap;
-  }
-
   /**
    * @param snapshot
    *          if it is not null, get the result from the given snapshot;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectoryWithQuota.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectoryWithQuota.java
index 1148f320aef..b623fe5c06e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectoryWithQuota.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectoryWithQuota.java
@@ -106,15 +106,6 @@ public class INodeDirectoryWithQuota extends INodeDirectory {
     return counts;
   }
 
-  @Override
-  public Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    final long original = countsMap.sum(Content.DISKSPACE);
-    super.computeContentSummary(countsMap);
-    checkDiskspace(countsMap.sum(Content.DISKSPACE) - original);
-    return countsMap;
-  }
-
   @Override
   public Content.Counts computeContentSummary(
       final Content.Counts counts) {
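With the CountsMap overloads removed above, every inode type implements only the Content.Counts overload, and directory aggregation becomes a plain recursive walk that reuses one mutable Counts object. A toy continuation of the earlier sketch; the Node/FileNode/DirNode classes are hypothetical stand-ins, not the real INode hierarchy:

import java.util.ArrayList;
import java.util.List;

abstract class Node {
  /** Accumulate this subtree's counts into the given object and return it. */
  abstract Counts computeContentSummary(Counts counts);
}

class FileNode extends Node {
  final long length;
  final long diskspace;

  FileNode(long length, long diskspace) {
    this.length = length;
    this.diskspace = diskspace;
  }

  Counts computeContentSummary(Counts counts) {
    counts.add(Content.FILE, 1);
    counts.add(Content.LENGTH, length);
    counts.add(Content.DISKSPACE, diskspace);
    return counts;
  }
}

class DirNode extends Node {
  final List<Node> children = new ArrayList<Node>();

  Counts computeContentSummary(Counts counts) {
    for (Node child : children) {
      child.computeContentSummary(counts); // same object; no per-level merging
    }
    counts.add(Content.DIRECTORY, 1);
    return counts;
  }
}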
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
index 655438ee45c..ef1bcaeda4d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
@@ -31,10 +31,9 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockCollection;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.FileDiffList;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiff;
-import org.apache.hadoop.hdfs.server.namenode.snapshot.FileDiffList;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.Util;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeFileWithSnapshot;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
@@ -359,15 +358,6 @@ public class INodeFile extends INodeWithAdditionalFields implements BlockCollect
     return counts;
   }
-
-  @Override
-  public final Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    computeContentSummary4Snapshot(countsMap.getCounts(Key.SNAPSHOT));
-    computeContentSummary4Current(countsMap.getCounts(Key.CURRENT));
-    return countsMap;
-  }
-
   @Override
   public final Content.Counts computeContentSummary(
       final Content.Counts counts) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeMap.java
index e0e4cdf49ee..18c8adf3fa8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeMap.java
@@ -22,7 +22,6 @@ import java.util.List;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap;
 import org.apache.hadoop.hdfs.server.namenode.Quota.Counts;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.util.GSet;
@@ -112,11 +111,6 @@ public class INodeMap {
       return null;
     }
 
-    @Override
-    public CountsMap computeContentSummary(CountsMap countsMap) {
-      return null;
-    }
-
     @Override
     public Counts cleanSubtree(Snapshot snapshot, Snapshot prior,
         BlocksMapUpdateInfo collectedBlocks, List<INode> removedINodes)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
index ef5366ef2b1..865d2098477 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
@@ -269,12 +269,7 @@ public abstract class INodeReference extends INode {
   }
 
   @Override
-  public final Content.CountsMap computeContentSummary(Content.CountsMap countsMap) {
-    return referred.computeContentSummary(countsMap);
-  }
-
-  @Override
-  public final Content.Counts computeContentSummary(Content.Counts counts) {
+  public Content.Counts computeContentSummary(Content.Counts counts) {
     return referred.computeContentSummary(counts);
   }
 
@@ -462,6 +457,15 @@ public abstract class INodeReference extends INode {
       return lastSnapshotId;
     }
 
+    @Override
+    public final Content.Counts computeContentSummary(Content.Counts counts) {
+      //only count diskspace for WithName
+      final Quota.Counts q = Quota.Counts.newInstance();
+      computeQuotaUsage(q, false, lastSnapshotId);
+      counts.add(Content.DISKSPACE, q.get(Quota.DISKSPACE));
+      return counts;
+    }
+
     @Override
     public final Quota.Counts computeQuotaUsage(Quota.Counts counts,
         boolean useCache, int lastSnapshotId) {
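The new WithName override above is the core of HDFS-4798. A WithName reference stands for a subtree that was snapshotted and later renamed away, so the referred inodes are still fully counted at the rename destination; adding their LENGTH, FILE, and DIRECTORY counts again through the snapshot path would double-count them. Only DISKSPACE is still attributed here, and it is derived from the existing quota machinery, which already knows how to compute usage up to lastSnapshotId. In terms of the hypothetical Counts sketch above, the intended contract looks roughly like this (illustrative numbers, not the real INodeReference API):

// Live side: the renamed file is counted once at its destination.
Counts live = Counts.newInstance();
new FileNode(10, 30).computeContentSummary(live);

// Snapshot side: a WithName-style reference contributes diskspace only.
Counts snapshot = Counts.newInstance();
snapshot.add(Content.DISKSPACE, 30); // from quota usage up to the snapshot
assert snapshot.get(Content.FILE) == 0;   // file counted at the destination
assert snapshot.get(Content.LENGTH) == 0; // length counted at the destination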
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java
index 96873fa5803..653ec8d3d97 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeSymlink.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 
 /**
@@ -97,13 +96,6 @@ public class INodeSymlink extends INodeWithAdditionalFields {
     return counts;
   }
 
-  @Override
-  public Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    computeContentSummary(countsMap.getCounts(Key.CURRENT));
-    return countsMap;
-  }
-
   @Override
   public Content.Counts computeContentSummary(final Content.Counts counts) {
     counts.add(Content.SYMLINK, 1);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
index 8db9a93c306..86bcf90047b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;
 import org.apache.hadoop.hdfs.server.namenode.Content;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
@@ -347,16 +346,6 @@ public class INodeDirectorySnapshottable extends INodeDirectoryWithSnapshot {
     return counts;
   }
 
-  @Override
-  public Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    super.computeContentSummary(countsMap);
-    countsMap.getCounts(Key.SNAPSHOT).add(Content.SNAPSHOT,
-        snapshotsByNames.size());
-    countsMap.getCounts(Key.CURRENT).add(Content.SNAPSHOTTABLE_DIRECTORY, 1);
-    return countsMap;
-  }
-
   /**
    * Compute the difference between two snapshots (or a snapshot and the current
    * directory) of the directory.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
index 8310963c7e7..ca39e6f4ec4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;
 import org.apache.hadoop.hdfs.server.namenode.Content;
-import org.apache.hadoop.hdfs.server.namenode.Content.CountsMap.Key;
 import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
@@ -819,14 +818,6 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
     return counts;
   }
 
-  @Override
-  public Content.CountsMap computeContentSummary(
-      final Content.CountsMap countsMap) {
-    super.computeContentSummary(countsMap);
-    computeContentSummary4Snapshot(countsMap.getCounts(Key.SNAPSHOT));
-    return countsMap;
-  }
-
   @Override
   public Content.Counts computeContentSummary(final Content.Counts counts) {
     super.computeContentSummary(counts);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
index fff53fb5e05..667221d8574 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
@@ -353,13 +353,50 @@ public class Diff<K, E extends Diff.Element<K>> {
 
   private static <K, E extends Diff.Element<K>> List<E> apply2Previous(
       final List<E> previous, final List<E> clist, final List<E> dlist) {
-    final List<E> current = new ArrayList<E>(previous);
-    for(E d : dlist) {
-      current.remove(d);
+    // Assumptions:
+    // (A1) All lists are sorted.
+    // (A2) All elements in dlist must be in previous.
+    // (A3) All elements in clist must be not in tmp = previous - dlist.
+    final List<E> tmp = new ArrayList<E>();
+    {
+      // tmp = previous - dlist
+      final Iterator<E> i = previous.iterator();
+      for(E deleted : dlist) {
+        E e = i.next(); //since dlist is non-empty, e must exist by (A2).
+        int cmp = 0;
+        for(; (cmp = e.compareTo(deleted.getKey())) < 0; e = i.next()) {
+          tmp.add(e);
+        }
+        Preconditions.checkState(cmp == 0); // check (A2)
+      }
+      for(; i.hasNext(); ) {
+        tmp.add(i.next());
+      }
     }
-    for(E c : clist) {
-      final int i = search(current, c.getKey());
-      current.add(-i - 1, c);
+
+    final List<E> current = new ArrayList<E>();
+    {
+      // current = tmp + clist
+      final Iterator<E> tmpIterator = tmp.iterator();
+      final Iterator<E> cIterator = clist.iterator();
+
+      E t = tmpIterator.hasNext()? tmpIterator.next(): null;
+      E c = cIterator.hasNext()? cIterator.next(): null;
+      for(; t != null || c != null; ) {
+        final int cmp = c == null? 1
+            : t == null? -1
+            : c.compareTo(t.getKey());
+
+        if (cmp < 0) {
+          current.add(c);
+          c = cIterator.hasNext()? cIterator.next(): null;
+        } else if (cmp > 0) {
+          current.add(t);
+          t = tmpIterator.hasNext()? tmpIterator.next(): null;
+        } else {
+          throw new AssertionError("Violated assumption (A3).");
+        }
+      }
     }
     return current;
   }
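The rewritten apply2Previous above replaces a list.remove() per deleted element and a binary-search insert per created element, which is quadratic on an ArrayList, with two linear merge passes over the already-sorted inputs. That matters for the enlarged TestDiff sizes below. Here is a standalone sketch of the same two-pass merge over plain integers, keeping the invariants (A1) sorted inputs, (A2) every deleted element occurs in previous, and (A3) no created element occurs in previous - dlist; it is an illustration, not the real generic Diff code:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class MergeSketch {
  /** Compute current = (previous - dlist) + clist; all inputs sorted. */
  static List<Integer> apply2Previous(List<Integer> previous,
      List<Integer> clist, List<Integer> dlist) {
    // Pass 1: tmp = previous - dlist.
    final List<Integer> tmp = new ArrayList<Integer>();
    final Iterator<Integer> i = previous.iterator();
    for (int deleted : dlist) {
      int e = i.next(); // by (A2) the deleted element must exist
      while (e < deleted) { // copy everything below the next deletion
        tmp.add(e);
        e = i.next();
      }
      if (e != deleted) {
        throw new IllegalStateException("(A2) violated");
      }
    }
    while (i.hasNext()) {
      tmp.add(i.next());
    }

    // Pass 2: current = tmp merged with clist; equal keys violate (A3).
    final List<Integer> current = new ArrayList<Integer>();
    final Iterator<Integer> ti = tmp.iterator();
    final Iterator<Integer> ci = clist.iterator();
    Integer t = ti.hasNext() ? ti.next() : null;
    Integer c = ci.hasNext() ? ci.next() : null;
    while (t != null || c != null) {
      final int cmp = c == null ? 1 : t == null ? -1 : c.compareTo(t);
      if (cmp < 0) {
        current.add(c);
        c = ci.hasNext() ? ci.next() : null;
      } else if (cmp > 0) {
        current.add(t);
        t = ti.hasNext() ? ti.next() : null;
      } else {
        throw new IllegalStateException("(A3) violated");
      }
    }
    return current;
  }

  public static void main(String[] args) {
    // previous = [1,3,5,7], delete [3,7], create [2,6] -> prints [1, 2, 5, 6]
    System.out.println(apply2Previous(
        Arrays.asList(1, 3, 5, 7),
        Arrays.asList(2, 6),
        Arrays.asList(3, 7)));
  }
}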
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestDiff.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestDiff.java
index d44ea43f654..22fc8998106 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestDiff.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestDiff.java
@@ -48,7 +48,7 @@ public class TestDiff {
   /** Test directory diff. */
   @Test(timeout=60000)
   public void testDiff() throws Exception {
-    for(int startSize = 0; startSize <= 1000; startSize = nextStep(startSize)) {
+    for(int startSize = 0; startSize <= 10000; startSize = nextStep(startSize)) {
       for(int m = 0; m <= 10000; m = nextStep(m)) {
         runDiffTest(startSize, m);
       }