From ac42519ade196516035cbcdf695bcf85ef70ae32 Mon Sep 17 00:00:00 2001
From: ZanderXu <15040255127@163.com>
Date: Mon, 5 Sep 2022 19:35:48 +0800
Subject: [PATCH] HDFS-16593. Correct the BlocksRemoved metric on DataNode
 side (#4353). Contributed by ZanderXu.

Signed-off-by: He Xiaoqiao <hexiaoqiao@apache.org>
---
 .../org/apache/hadoop/hdfs/server/datanode/BPOfferService.java | 1 -
 .../hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java     | 1 +
 .../hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java | 3 +++
 3 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
index a990e1915de..d660970c725 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BPOfferService.java
@@ -741,7 +741,6 @@ class BPOfferService {
         // Exceptions caught here are not expected to be disk-related.
         throw e;
       }
-      dn.metrics.incrBlocksRemoved(toDelete.length);
       break;
     case DatanodeProtocol.DNA_CACHE:
       LOG.info("DatanodeCommand action: DNA_CACHE for " +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
index 633eeab03e9..adb90ab7ba8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
@@ -2350,6 +2350,7 @@ class FsDatasetImpl implements FsDatasetSpi<FsVolumeImpl> {
         removing = volumeMap.remove(bpid, invalidBlks[i]);
         addDeletingBlock(bpid, removing.getBlockId());
         LOG.debug("Block file {} is to be deleted", removing.getBlockURI());
+        datanode.getMetrics().incrBlocksRemoved(1);
         if (removing instanceof ReplicaInPipeline) {
           ((ReplicaInPipeline) removing).releaseAllBytesReserved();
         }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
index 1e4de751486..0805257a287 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsDatasetImpl.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.DataNodeVolumeMetrics;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi.FsVolumeReferences;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
+import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.io.MultipleIOException;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -226,6 +227,8 @@ public class TestFsDatasetImpl {
     final ShortCircuitRegistry shortCircuitRegistry =
         new ShortCircuitRegistry(conf);
     when(datanode.getShortCircuitRegistry()).thenReturn(shortCircuitRegistry);
+    DataNodeMetrics dataNodeMetrics = DataNodeMetrics.create(conf, "mockName");
+    when(datanode.getMetrics()).thenReturn(dataNodeMetrics);
 
     createStorageDirs(storage, conf, NUM_INIT_VOLUMES);
     dataset = new FsDatasetImpl(datanode, storage, conf);
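
Note (not part of the patch): the sketch below is one way to exercise the corrected counter end to end. It assumes a MiniDFSCluster-based JUnit test and that DataNodeMetrics#incrBlocksRemoved backs the counter exposed as "BlocksRemoved"; the class name, test name, and file path are illustrative only.

  // Sketch only, assuming a MiniDFSCluster test environment; the test class,
  // method name, and path are hypothetical, not part of HDFS-16593.
  import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
  import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hdfs.DFSTestUtil;
  import org.apache.hadoop.hdfs.DistributedFileSystem;
  import org.apache.hadoop.hdfs.HdfsConfiguration;
  import org.apache.hadoop.hdfs.MiniDFSCluster;
  import org.apache.hadoop.hdfs.server.datanode.DataNode;
  import org.apache.hadoop.test.GenericTestUtils;
  import org.junit.Test;

  public class TestBlocksRemovedMetric {
    @Test
    public void testBlocksRemovedMatchesActualDeletions() throws Exception {
      Configuration conf = new HdfsConfiguration();
      try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
        cluster.waitActive();
        DistributedFileSystem fs = cluster.getFileSystem();
        Path file = new Path("/blocksRemovedMetric");
        // One block, one replica: exactly one removal is expected.
        DFSTestUtil.createFile(fs, file, 1024, (short) 1, 0L);
        fs.delete(file, false);

        DataNode dn = cluster.getDataNodes().get(0);
        // Block deletion is asynchronous (the NameNode schedules
        // DNA_INVALIDATE via a heartbeat response), so poll until the
        // DataNode has actually removed the replica.
        GenericTestUtils.waitFor(() -> getLongCounter("BlocksRemoved",
            getMetrics(dn.getMetrics().name())) == 1L, 100, 30000);
      }
    }
  }

With the increment moved into FsDatasetImpl#invalidate, the counter only advances when a replica is actually removed from the volume map, so a check like the one above no longer over-counts when invalidation fails.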