diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index b7fbc231caa..080f0d4afeb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -816,6 +816,9 @@ Release 2.8.0 - UNRELEASED
 
     HDFS-8803. Move DfsClientConf to hdfs-client. (Mingliang Liu via wheat9)
 
+    HDFS-8917. Cleanup BlockInfoUnderConstruction from comments and tests.
+    (Zhe Zhang via jing9)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java
index 94dac3590aa..659be564aea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java
@@ -78,7 +78,6 @@ public abstract class BlockInfo extends Block
 
   /**
    * Copy construction.
-   * This is used to convert BlockInfoUnderConstruction
    * @param from BlockInfo to copy from.
    */
   protected BlockInfo(BlockInfo from) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoContiguous.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoContiguous.java
index eff89a80832..42934c3abb7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoContiguous.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoContiguous.java
@@ -37,8 +37,7 @@ public class BlockInfoContiguous extends BlockInfo {
 
   /**
    * Copy construction.
-   * This is used to convert BlockReplicationInfoUnderConstruction
-   * @param from BlockReplicationInfo to copy from.
+   * @param from BlockInfoContiguous to copy from.
    */
   protected BlockInfoContiguous(BlockInfoContiguous from) {
     super(from);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index aad7fec41f2..f2d05151683 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -2236,7 +2236,7 @@ public class BlockManager implements BlockStatsMXBean {
    * is fully replicated.</li>
    * <li>If the reported replica is for a block currently marked "under
    * construction" in the NN, then it should be added to the
-   * BlockInfoUnderConstruction's list of replicas.</li>
+   * BlockUnderConstructionFeature's list of replicas.</li>
    * </ol>
    *
    * @param storageInfo DatanodeStorageInfo that sent the report.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockUnderConstructionFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockUnderConstructionFeature.java
index a555cd68e76..de51b2ffd90 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockUnderConstructionFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockUnderConstructionFeature.java
@@ -151,7 +151,7 @@ public class BlockUnderConstructionFeature {
   public BlockUnderConstructionFeature(Block block, BlockUCState state,
       DatanodeStorageInfo[] targets) {
     assert getBlockUCState() != BlockUCState.COMPLETE :
-        "BlockInfoUnderConstruction cannot be in COMPLETE state";
+        "BlockUnderConstructionFeature cannot be in COMPLETE state";
     this.blockUCState = state;
     setExpectedLocations(block.getGenerationStamp(), targets);
   }
@@ -241,7 +241,7 @@ public class BlockUnderConstructionFeature {
     blockRecoveryId = recoveryId;
     if (replicas.size() == 0) {
       NameNode.blockStateChangeLog.warn("BLOCK*"
-          + " BlockInfoUnderConstruction.initLeaseRecovery:"
+          + " BlockUnderConstructionFeature.initLeaseRecovery:"
           + " No blocks found, lease removed.");
     }
     boolean allLiveReplicasTriedAsPrimary = true;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileUnderConstructionFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileUnderConstructionFeature.java
index 26ee8f6e562..81ec255e70c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileUnderConstructionFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileUnderConstructionFeature.java
@@ -62,7 +62,7 @@ public class FileUnderConstructionFeature implements INode.Feature {
         + f.getFullPathName() + " is null when updating its length";
     assert !lastBlock.isComplete()
         : "The last block for path " + f.getFullPathName()
-        + " is not a BlockInfoUnderConstruction when updating its length";
+        + " is not under-construction when updating its length";
     lastBlock.setNumBytes(lastBlockLength);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
index 823164d8821..5dd5bb19ba6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
@@ -693,7 +693,7 @@ public class INodeFile extends INodeWithAdditionalFields
       return 0;
     }
     final int last = blocks.length - 1;
-    //check if the last block is BlockInfoUnderConstruction
+    //check if the last block is under-construction
     long size = blocks[last].getNumBytes();
     if (!blocks[last].isComplete()) {
       if (!includesLastUcBlock) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend.java
index 402c944fee6..85d92c91b38 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend.java
@@ -331,7 +331,7 @@ public class TestFileAppend{
 
       //1st append does not add any data so that the last block remains full
       //and the last block in INodeFileUnderConstruction is a BlockInfo
-      //but not BlockInfoUnderConstruction.
+      //but does not have a BlockUnderConstructionFeature.
       fs2.append(p);
 
       //2nd append should get AlreadyBeingCreatedException
@@ -369,7 +369,7 @@ public class TestFileAppend{
 
       //1st append does not add any data so that the last block remains full
       //and the last block in INodeFileUnderConstruction is a BlockInfo
-      //but not BlockInfoUnderConstruction.
+      //but does not have a BlockUnderConstructionFeature.
       ((DistributedFileSystem) fs2).append(p,
           EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null);
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockInfoUnderConstruction.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockUnderConstructionFeature.java
similarity index 96%
rename from hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockInfoUnderConstruction.java
rename to hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockUnderConstructionFeature.java
index c34747205dd..b47aac08244 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockInfoUnderConstruction.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockUnderConstructionFeature.java
@@ -26,9 +26,9 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.junit.Test;
 
 /**
- * This class provides tests for BlockInfoUnderConstruction class
+ * This class provides tests for {@link BlockUnderConstructionFeature} class
  */
-public class TestBlockInfoUnderConstruction {
+public class TestBlockUnderConstructionFeature {
   @Test
   public void testInitializeBlockRecovery() throws Exception {
     DatanodeStorageInfo s1 = DFSTestUtil.createDatanodeStorageInfo("10.10.1.1", "s1");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
index b6709224a22..29d227294e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
@@ -227,8 +227,8 @@ public class SnapshotTestHelper {
         line2 = line2.replaceAll("Quota\\[.*\\]", "Quota[]");
       }
 
-      // skip the specific fields of BlockInfoUnderConstruction when the node
-      // is an INodeFileSnapshot or an INodeFileUnderConstructionSnapshot
+      // skip the specific fields of BlockUnderConstructionFeature when the
+      // node is an INodeFileSnapshot or INodeFileUnderConstructionSnapshot
       if (line1.contains("(INodeFileSnapshot)")
           || line1.contains("(INodeFileUnderConstructionSnapshot)")) {
         line1 = line1.replaceAll(