HDFS-12371. BlockVerificationFailures and BlocksVerified show up as 0 in Datanode JMX. Contributed by Hanisha Koneru.
(cherry picked from commit 6bf921a5c3152a307b5c0903056d73ce07775a08)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7d6bc56f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7d6bc56f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7d6bc56f

Branch: refs/heads/b-2.7-HDFS-12371
Commit: 7d6bc56f16319e1e75397232d9fb3afc5b567735
Parents: 88d951e
Author: Kihwal Lee <kih...@apache.org>
Authored: Thu Sep 21 09:00:26 2017 -0500
Committer: Konstantin V Shvachko <s...@apache.org>
Committed: Tue Jan 16 18:04:52 2018 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7d6bc56f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
index a40cbc8..b63af29 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.server.datanode.BlockScanner.Conf;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi.BlockIterator;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
+import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
 import org.apache.hadoop.hdfs.util.DataTransferThrottler;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Time;
@@ -81,6 +82,8 @@ public class VolumeScanner extends Thread {
    */
   private final DataNode datanode;
 
+  private final DataNodeMetrics metrics;
+
   /**
    * A reference to the volume that we're scanning.
    */
@@ -301,6 +304,7 @@ public class VolumeScanner extends Thread {
   VolumeScanner(Conf conf, DataNode datanode, FsVolumeReference ref) {
     this.conf = conf;
     this.datanode = datanode;
+    this.metrics = datanode.getMetrics();
     this.ref = ref;
     this.volume = ref.getVolume();
     ScanResultHandler handler;
@@ -445,12 +449,14 @@ public class VolumeScanner extends Thread {
       throttler.setBandwidth(bytesPerSec);
       long bytesRead = blockSender.sendBlock(nullStream, null, throttler);
       resultHandler.handle(block, null);
+      metrics.incrBlocksVerified();
       return bytesRead;
     } catch (IOException e) {
       resultHandler.handle(block, e);
     } finally {
       IOUtils.cleanup(null, blockSender);
     }
+    metrics.incrBlockVerificationFailures();
     return -1;
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org