Repository: hadoop
Updated Branches:
  refs/heads/branch-3.0 1e5597227 -> 9bd1f364a


HDFS-12371. BlockVerificationFailures and BlocksVerified show up as 0 in 
Datanode JMX. Contributed by Hanisha Koneru.

(cherry picked from commit 6bf921a5c3152a307b5c0903056d73ce07775a08)
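For context: the two counters named in the JIRA are exposed through the DataNode's
metrics2 source, which is what surfaces them in JMX, so a counter that is never
incremented reads 0 there. Below is a minimal, illustrative sketch of how such
counters are typically declared with the metrics2 annotations; the class and field
names are placeholders, not the actual DataNodeMetrics source.

// Illustrative sketch only (not the actual DataNodeMetrics class). Counters
// registered with a MetricsSystem are exported through JMX under the
// capitalized field name, so a counter that is never incremented shows 0.
package org.example.sketch;

import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;

@Metrics(about = "Example DataNode scan counters", context = "dfs")
public class ScanCountersSketch {
  // Exported as "BlocksVerified" once the source is registered.
  @Metric private MutableCounterLong blocksVerified;
  // Exported as "BlockVerificationFailures".
  @Metric private MutableCounterLong blockVerificationFailures;

  public void incrBlocksVerified() {
    blocksVerified.incr();
  }

  public void incrBlockVerificationFailures() {
    blockVerificationFailures.incr();
  }
}

The patch below simply wires VolumeScanner to the DataNode's metrics object and
increments these counters on the success and failure paths of a block scan.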


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9bd1f364
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9bd1f364
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9bd1f364

Branch: refs/heads/branch-3.0
Commit: 9bd1f364a70974ec1cd4ac94d49e960cc6d6c336
Parents: 1e55972
Author: Kihwal Lee <kih...@apache.org>
Authored: Thu Sep 21 08:48:37 2017 -0500
Committer: Kihwal Lee <kih...@apache.org>
Committed: Thu Sep 21 08:48:37 2017 -0500

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java  | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9bd1f364/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
index 8b29fce..181ef80 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.server.datanode.BlockScanner.Conf;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi.BlockIterator;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
+import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
 import org.apache.hadoop.hdfs.util.DataTransferThrottler;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Time;
@@ -81,6 +82,8 @@ public class VolumeScanner extends Thread {
    */
   private final DataNode datanode;
 
+  private final DataNodeMetrics metrics;
+
   /**
    * A reference to the volume that we're scanning.
    */
@@ -299,6 +302,7 @@ public class VolumeScanner extends Thread {
   VolumeScanner(Conf conf, DataNode datanode, FsVolumeReference ref) {
     this.conf = conf;
     this.datanode = datanode;
+    this.metrics = datanode.getMetrics();
     this.ref = ref;
     this.volume = ref.getVolume();
     ScanResultHandler handler;
@@ -443,12 +447,14 @@ public class VolumeScanner extends Thread {
       throttler.setBandwidth(bytesPerSec);
       long bytesRead = blockSender.sendBlock(nullStream, null, throttler);
       resultHandler.handle(block, null);
+      metrics.incrBlocksVerified();
       return bytesRead;
     } catch (IOException e) {
       resultHandler.handle(block, e);
     } finally {
       IOUtils.cleanup(null, blockSender);
     }
+    metrics.incrBlockVerificationFailures();
     return -1;
   }
 

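One way to sanity-check the change (not part of this commit) is to read the
counters back from the DataNode's metrics record, for example with the
MetricsAsserts test helpers from hadoop-common. The sketch below assumes a
DataNode whose VolumeScanner has already scanned at least one block; the class
and method names are hypothetical.

// Hypothetical verification sketch (not part of this commit).
import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;

public class ScanMetricsCheckSketch {
  /** Returns the BlocksVerified counter for the given DataNode. */
  public static long blocksVerified(DataNode dn) {
    MetricsRecordBuilder rb = getMetrics(dn.getMetrics().name());
    // Before HDFS-12371 this stayed at 0 no matter how many blocks were scanned.
    return getLongCounter("BlocksVerified", rb);
  }

  /** Returns the BlockVerificationFailures counter for the given DataNode. */
  public static long blockVerificationFailures(DataNode dn) {
    MetricsRecordBuilder rb = getMetrics(dn.getMetrics().name());
    return getLongCounter("BlockVerificationFailures", rb);
  }
}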
