HDFS-16509. Fix decommission UnsupportedOperationException (#4077). Contributed by daimin.

(cherry picked from commit c65c383b7e)
daimin 2022-04-14 11:07:06 +08:00 committed by Wei-Chiu Chuang
parent 52abc9f132
commit 0ef1a13f01
2 changed files with 21 additions and 2 deletions


@@ -390,8 +390,10 @@ private void processBlocksInternal(
       // Remove the block from the list if it's no longer in the block map,
       // e.g. the containing file has been deleted
       if (blockManager.blocksMap.getStoredBlock(block) == null) {
-        LOG.trace("Removing unknown block {}", block);
-        it.remove();
+        if (pruneReliableBlocks) {
+          LOG.trace("Removing unknown block {}", block);
+          it.remove();
+        }
         continue;
       }
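For context on the hunk above: the commit title points to an UnsupportedOperationException during decommission, which fits an unconditional it.remove() being invoked on an iterator whose backing collection does not permit removal; the patch therefore prunes only when pruneReliableBlocks is set. The following standalone Java sketch reproduces that failure mode and the guard. It is illustrative only; RemoveGuardSketch, isUnknown, and the string block list are stand-ins, not Hadoop code.

import java.util.Iterator;
import java.util.List;

// Illustrative only: shows why an unconditional it.remove() can throw
// UnsupportedOperationException when the underlying collection is unmodifiable,
// and how a guard flag (mirroring pruneReliableBlocks) avoids it.
public class RemoveGuardSketch {
  public static void main(String[] args) {
    // List.of() returns an unmodifiable list, so its iterator rejects remove().
    List<String> blocks = List.of("blk_1", "blk_2", "blk_3");
    boolean pruneReliableBlocks = false;   // stand-in for the flag in the patch

    Iterator<String> it = blocks.iterator();
    while (it.hasNext()) {
      String block = it.next();
      if (isUnknown(block)) {              // stand-in for the blocksMap lookup
        if (pruneReliableBlocks) {
          it.remove();                     // would throw here if it were reached
        }
        continue;
      }
    }
    System.out.println("Iterated without UnsupportedOperationException");
  }

  private static boolean isUnknown(String block) {
    return true;  // pretend every block is missing from the block map
  }
}

Switching pruneReliableBlocks to true over a modifiable list (for example new ArrayList<>(blocks)) exercises the pruning branch without the exception.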


@@ -55,6 +55,7 @@
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
+import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
@@ -65,6 +66,7 @@
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
+import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
@@ -672,6 +674,21 @@ public void testDecommissionWithOpenfile()
     fdos.close();
   }
 
+  @Test(timeout = 20000)
+  public void testDecommissionWithUnknownBlock() throws IOException {
+    startCluster(1, 3);
+
+    FSNamesystem ns = getCluster().getNamesystem(0);
+    DatanodeManager datanodeManager = ns.getBlockManager().getDatanodeManager();
+
+    BlockInfo blk = new BlockInfoContiguous(new Block(1L), (short) 1);
+    DatanodeDescriptor dn = datanodeManager.getDatanodes().iterator().next();
+    dn.getStorageInfos()[0].addBlock(blk, blk);
+
+    datanodeManager.getDatanodeAdminManager().startDecommission(dn);
+    waitNodeState(dn, DatanodeInfo.AdminStates.DECOMMISSIONED);
+  }
+
   private static String scanIntoString(final ByteArrayOutputStream baos) {
     final TextStringBuilder sb = new TextStringBuilder();
     final Scanner scanner = new Scanner(baos.toString());
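
To try the new regression test locally, one plausible Maven Surefire invocation is shown below; the TestDecommission class name and the hadoop-hdfs module path are inferred from the surrounding methods and the usual Hadoop layout, not stated in this diff:

  mvn test -pl hadoop-hdfs-project/hadoop-hdfs -Dtest=TestDecommission#testDecommissionWithUnknownBlock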