svn merge -c 1489708 from trunk for HDFS-4876. Fix the javadoc of FileWithSnapshot and move FileDiffList to FileWithSnapshot.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1489709 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2013-06-05 05:27:58 +00:00
parent 99faae2076
commit f974ce444d
7 changed files with 38 additions and 67 deletions
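At its core the change is structural: FileDiffList, previously a top-level class in its own file, is deleted and re-created as a static nested class of the FileWithSnapshot interface, whose javadoc is also corrected. A minimal Java sketch of the resulting layout, simplified from the diffs below (member bodies elided; the real classes extend AbstractINodeDiff and AbstractINodeDiffList):

// Simplified sketch only, not the full Hadoop source.
package org.apache.hadoop.hdfs.server.namenode.snapshot;

public interface FileWithSnapshot {

  /** Per-snapshot diff of an INodeFile (simplified). */
  public static class FileDiff { /* see diff below */ }

  /** A list of FileDiffs for storing snapshot data, now nested here (HDFS-4876). */
  public static class FileDiffList { /* see diff below */ }
}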


@@ -829,6 +829,9 @@ Release 2.1.0-beta - UNRELEASED
HDFS-4826. TestNestedSnapshots times out due to repeated slow edit log
flushes when running on virtualized disk. (Chris Nauroth via szetszwo)
HDFS-4876. Fix the javadoc of FileWithSnapshot and move FileDiffList to
FileWithSnapshot. (szetszwo)
Release 2.0.5-alpha - UNRELEASED
INCOMPATIBLE CHANGES


@@ -50,7 +50,7 @@ import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectorySnapshottable;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectoryWithSnapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeFileUnderConstructionWithSnapshot;


@@ -23,17 +23,15 @@ import java.io.PrintWriter;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockCollection;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.Util;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeFileWithSnapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;


@@ -45,7 +45,7 @@ public class INodeMap {
  }
  /** Synchronized by external lock. */
  private GSet<INode, INodeWithAdditionalFields> map;
  private final GSet<INode, INodeWithAdditionalFields> map;
  private INodeMap(GSet<INode, INodeWithAdditionalFields> map) {
    Preconditions.checkArgument(map != null);


@@ -1,57 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode.snapshot;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.INodeFileUnderConstruction;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiff;
/**
 * A list of FileDiffs for storing snapshot data.
 */
@InterfaceAudience.Private
public class FileDiffList
    extends AbstractINodeDiffList<INodeFile, FileDiff> {

  @Override
  FileDiff createDiff(Snapshot snapshot, INodeFile file) {
    return new FileDiff(snapshot, file);
  }

  @Override
  INodeFile createSnapshotCopy(INodeFile currentINode) {
    if (currentINode instanceof INodeFileUnderConstructionWithSnapshot) {
      final INodeFileUnderConstruction uc =
          (INodeFileUnderConstruction) currentINode;
      final INodeFileUnderConstruction copy = new INodeFileUnderConstruction(
          uc, uc.getClientName(), uc.getClientMachine(), uc.getClientNode());
      copy.setBlocks(null);
      return copy;
    } else {
      final INodeFile copy = new INodeFile(currentINode);
      copy.setBlocks(null);
      return copy;
    }
  }
}


@@ -27,13 +27,12 @@ import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
import org.apache.hadoop.hdfs.server.namenode.INode.BlocksMapUpdateInfo;
import org.apache.hadoop.hdfs.server.namenode.INode;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.INodeFileUnderConstruction;
import org.apache.hadoop.hdfs.server.namenode.Quota;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotFSImageFormat.ReferenceMap;
/**
 * {@link INodeFile} with a link to the next element.
 * The link of all the snapshot files and the original file form a circular
 * linked list so that all elements are accessible by any of the elements.
 * An interface for {@link INodeFile} to support snapshot.
 */
@InterfaceAudience.Private
public interface FileWithSnapshot {
@@ -44,7 +43,7 @@ public interface FileWithSnapshot {
    /** The file size at snapshot creation time. */
    private final long fileSize;

    FileDiff(Snapshot snapshot, INodeFile file) {
    private FileDiff(Snapshot snapshot, INodeFile file) {
      super(snapshot, null, null);
      fileSize = file.computeFileSize();
    }
@@ -119,6 +118,34 @@
}
}
  /** A list of FileDiffs for storing snapshot data. */
  public static class FileDiffList
      extends AbstractINodeDiffList<INodeFile, FileDiff> {
    @Override
    FileDiff createDiff(Snapshot snapshot, INodeFile file) {
      return new FileDiff(snapshot, file);
    }

    @Override
    INodeFile createSnapshotCopy(INodeFile currentINode) {
      if (currentINode instanceof INodeFileUnderConstructionWithSnapshot) {
        final INodeFileUnderConstruction uc =
            (INodeFileUnderConstruction) currentINode;
        final INodeFileUnderConstruction copy = new INodeFileUnderConstruction(
            uc, uc.getClientName(), uc.getClientMachine(), uc.getClientNode());
        copy.setBlocks(null);
        return copy;
      } else {
        final INodeFile copy = new INodeFile(currentINode);
        copy.setBlocks(null);
        return copy;
      }
    }
  }

  /** @return the {@link INodeFile} view of this object. */
  public INodeFile asINodeFile();
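With the move, call sites refer to the nested type, which matches the import updates elsewhere in this commit. A short illustrative usage sketch, assuming the HDFS snapshot classes are on the classpath (the client class name is invented for illustration):

// Illustrative only: referencing the relocated nested class from other code.
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList;

class FileDiffListUsageSketch {
  // A single-type import of the nested class keeps the short name usable:
  private FileDiffList diffs;

  // Without that import, the qualified form also works:
  private FileWithSnapshot.FileDiffList diffsQualified;
}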


@@ -34,7 +34,7 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.INodeReference;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.FileWithSnapshot.FileDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectoryWithSnapshot.DirectoryDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectoryWithSnapshot.DirectoryDiffList;
import org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff;