HDFS-13300. Ozone: Remove DatanodeID dependency from HDSL and Ozone.
Contributed by Nanda kumar.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3440ca6e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3440ca6e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3440ca6e

Branch: refs/heads/HDFS-7240
Commit: 3440ca6e0c76bd50854eb5b72fa1486cfe4b6575
Parents: ab8fb01
Author: Anu Engineer <aengin...@apache.org>
Authored: Tue Mar 27 12:55:26 2018 -0700
Committer: Anu Engineer <aengin...@apache.org>
Committed: Tue Mar 27 12:55:26 2018 -0700

----------------------------------------------------------------------
 .../src/main/compose/cblock/docker-config       |   2 +-
 .../src/main/compose/ozone/docker-config        |   2 +-
 .../hadoop/hdfs/server/datanode/DataNode.java   |   7 -
 .../org/apache/hadoop/scm/XceiverClient.java    |   6 +-
 .../apache/hadoop/scm/XceiverClientHandler.java |   2 +-
 .../apache/hadoop/scm/XceiverClientRatis.java   |  10 +-
 .../java/org/apache/hadoop/hdsl/HdslUtils.java  |   2 +-
 .../hadoop/hdsl/protocol/DatanodeDetails.java   | 422 +++++++++++++++++++
 .../hadoop/hdsl/protocol/package-info.java      |  22 +
 .../org/apache/hadoop/scm/XceiverClientSpi.java |   4 +-
 .../scm/container/common/helpers/Pipeline.java  |   8 +-
 .../common/helpers/PipelineChannel.java         |  19 +-
 .../scm/storage/ContainerProtocolCalls.java     |  40 +-
 .../main/java/org/apache/ratis/RatisHelper.java |  19 +-
 .../main/proto/DatanodeContainerProtocol.proto  |   2 +-
 hadoop-hdsl/common/src/main/proto/hdsl.proto    |  16 +-
 .../hadoop/ozone/HdslDatanodeService.java       | 140 ++++++
 .../apache/hadoop/ozone/HdslServerPlugin.java   |  82 ----
 .../common/helpers/ContainerUtils.java          |  72 +---
 .../common/impl/ContainerManagerImpl.java       |  16 +-
 .../common/interfaces/ContainerManager.java     |   6 +-
 .../statemachine/DatanodeStateMachine.java      |  26 +-
 .../states/datanode/InitDatanodeState.java      |  32 +-
 .../states/datanode/RunningDatanodeState.java   |  82 +---
 .../states/endpoint/HeartbeatEndpointTask.java  |  45 +-
 .../states/endpoint/RegisterEndpointTask.java   |  43 +-
 .../common/transport/server/XceiverServer.java  |   4 +-
 .../server/ratis/XceiverServerRatis.java        |  21 +-
 .../container/ozoneimpl/OzoneContainer.java     |  13 +-
 .../StorageContainerDatanodeProtocol.java       |  10 +-
 .../protocol/StorageContainerNodeProtocol.java  |  11 +-
 ...rDatanodeProtocolClientSideTranslatorPB.java |  16 +-
 ...rDatanodeProtocolServerSideTranslatorPB.java |   8 +-
 .../StorageContainerDatanodeProtocol.proto      |  28 +-
 .../ozone/container/common/ScmTestMock.java     |  24 +-
 .../common/TestDatanodeStateMachine.java        |  32 +-
 .../ozone/scm/StorageContainerManager.java      |  43 +-
 .../block/DatanodeDeletedBlockTransactions.java |  26 +-
 .../scm/block/SCMBlockDeletingService.java      |  17 +-
 .../ozone/scm/container/ContainerMapping.java   |   2 +-
 .../scm/container/closer/ContainerCloser.java   |   8 +-
 .../algorithms/ContainerPlacementPolicy.java    |   4 +-
 .../placement/algorithms/SCMCommonPolicy.java   |  29 +-
 .../SCMContainerPlacementCapacity.java          |  32 +-
 .../algorithms/SCMContainerPlacementRandom.java |  12 +-
 .../replication/ContainerSupervisor.java        |  13 +-
 .../container/replication/InProgressPool.java   |  31 +-
 .../hadoop/ozone/scm/node/CommandQueue.java     |  20 +-
 .../ozone/scm/node/HeartbeatQueueItem.java      |  22 +-
 .../hadoop/ozone/scm/node/NodeManager.java      |  25 +-
 .../hadoop/ozone/scm/node/NodePoolManager.java  |  12 +-
 .../hadoop/ozone/scm/node/SCMNodeManager.java   | 155 ++++---
 .../ozone/scm/node/SCMNodePoolManager.java      |  41 +-
 .../ozone/scm/pipelines/PipelineManager.java    |   8 +-
 .../ozone/scm/pipelines/PipelineSelector.java   |  23 +-
 .../scm/pipelines/ratis/RatisManagerImpl.java   |  19 +-
 .../standalone/StandaloneManagerImpl.java       |  18 +-
 .../ozone/container/common/TestEndPoint.java    |  49 ++-
 .../placement/TestContainerPlacement.java       |  24 +-
 .../replication/TestContainerSupervisor.java    |  15 +-
 .../ReplicationDatanodeStateManager.java        |   8 +-
 .../testutils/ReplicationNodeManagerMock.java   |  48 ++-
 .../ReplicationNodePoolManagerMock.java         |  23 +-
 .../org/apache/hadoop/ozone/scm/TestUtils.java  |  58 +--
 .../ozone/scm/block/TestBlockManager.java       |   8 +-
 .../ozone/scm/block/TestDeletedBlockLog.java    |  44 +-
 .../ozone/scm/container/MockNodeManager.java    | 105 ++---
 .../scm/container/TestContainerMapping.java     |  19 +-
 .../container/closer/TestContainerCloser.java   |  20 +-
 .../ozone/scm/node/TestContainerPlacement.java  |  11 +-
 .../hadoop/ozone/scm/node/TestNodeManager.java  | 227 +++++-----
 .../ozone/scm/node/TestSCMNodePoolManager.java  |  31 +-
 .../scm/cli/container/InfoContainerHandler.java |   4 +-
 .../hadoop/ozone/web/utils/OzoneUtils.java      |   4 +-
 .../hadoop/ozone/MiniOzoneClassicCluster.java   |  34 +-
 .../hadoop/ozone/MiniOzoneTestHelper.java       |   6 +-
 .../apache/hadoop/ozone/RatisTestHelper.java    |   3 +-
 .../hadoop/ozone/TestMiniOzoneCluster.java      |  98 ++---
 .../ozone/TestStorageContainerManager.java      |   3 +-
 .../TestStorageContainerManagerHelper.java      |  11 +-
 .../ozone/container/ContainerTestHelper.java    |  60 +--
 .../common/TestBlockDeletingService.java        |   4 +-
 .../TestContainerDeletionChoosingPolicy.java    |  12 +-
 .../common/impl/TestContainerPersistence.java   |   4 +-
 .../TestCloseContainerHandler.java              |   5 +-
 .../container/metrics/TestContainerMetrics.java |   5 +-
 .../container/ozoneimpl/TestOzoneContainer.java |   5 +-
 .../ozoneimpl/TestOzoneContainerRatis.java      |   5 +-
 .../container/ozoneimpl/TestRatisManager.java   |  14 +-
 .../container/server/TestContainerServer.java   |  25 +-
 .../ksm/TestKeySpaceManagerRestInterface.java   |   2 +-
 .../hadoop/ozone/ozShell/TestOzoneShell.java    |   2 +-
 .../hadoop/ozone/scm/TestContainerSQLCli.java   |  12 +-
 .../org/apache/hadoop/ozone/scm/TestSCMCli.java |  11 +-
 .../apache/hadoop/ozone/scm/TestSCMMetrics.java |  31 +-
 .../ozone/web/TestDistributedOzoneVolumes.java  |   2 +-
 .../hadoop/ozone/web/TestLocalOzoneVolumes.java |   2 +-
 .../hadoop/ozone/web/TestOzoneWebAccess.java    |   2 +-
 .../hadoop/ozone/web/client/TestBuckets.java    |   2 +-
 .../hadoop/ozone/web/client/TestKeys.java       |   4 +-
 .../ozone/web/client/TestOzoneClient.java       |   2 +-
 .../hadoop/ozone/web/client/TestVolume.java     |   2 +-
 hadoop-ozone/objectstore-service/pom.xml        |   5 +
 .../hadoop/ozone/web/ObjectStoreRestPlugin.java |  49 +--
 .../hadoop/ozone/ksm/KeySpaceManager.java       |   3 +-
 .../org/apache/hadoop/ozone/scm/cli/SQLCLI.java |  52 ++-
 106 files changed, 1768 insertions(+), 1261 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-dist/src/main/compose/cblock/docker-config
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/cblock/docker-config b/hadoop-dist/src/main/compose/cblock/docker-config
index 95917fd..da0c2ac 100644
--- a/hadoop-dist/src/main/compose/cblock/docker-config
+++ b/hadoop-dist/src/main/compose/cblock/docker-config
@@ -27,7 +27,7 @@ OZONE-SITE.XML_ozone.scm.client.address=scm
 OZONE-SITE.XML_dfs.cblock.jscsi.cblock.server.address=cblock
 OZONE-SITE.XML_dfs.cblock.scm.ipaddress=scm
 OZONE-SITE.XML_dfs.cblock.service.leveldb.path=/tmp
-HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.HdslServerPlugin,org.apache.hadoop.ozone.web.ObjectStoreRestPlugin
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService
 HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
 HDFS-SITE.XML_rpc.metrics.quantile.enable=true
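
Note: the docker-config files above and below use the FILENAME_property=value
convention of the compose images; for reference, the equivalent plain
hdfs-site.xml entry for the renamed plugin would be (XML form shown only for
illustration):

    <property>
      <name>dfs.datanode.plugins</name>
      <value>org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService</value>
    </property>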

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-dist/src/main/compose/ozone/docker-config
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozone/docker-config b/hadoop-dist/src/main/compose/ozone/docker-config
index 20c1e30..d297b19 100644
--- a/hadoop-dist/src/main/compose/ozone/docker-config
+++ b/hadoop-dist/src/main/compose/ozone/docker-config
@@ -27,7 +27,7 @@ HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
 HDFS-SITE.XML_rpc.metrics.quantile.enable=true
 HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
-HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.HdslServerPlugin,org.apache.hadoop.ozone.web.ObjectStoreRestPlugin
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService
 LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
 LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index b757fd7..e04673e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -1577,13 +1577,6 @@ public class DataNode extends ReconfigurableBase
           + bpRegistration.getDatanodeUuid()
           + ". Expecting " + storage.getDatanodeUuid());
     }
-
-    for (ServicePlugin plugin : plugins) {
-      if (plugin instanceof DataNodeServicePlugin) {
-        ((DataNodeServicePlugin) plugin)
-            .onDatanodeSuccessfulNamenodeRegisration(bpRegistration);
-      }
-    }
     registerBlockPoolWithSecretManager(bpRegistration, blockPoolId);
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java
index 6897330..06b1e99 100644
--- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java
+++ b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java
@@ -28,9 +28,9 @@ import io.netty.channel.socket.nio.NioSocketChannel;
 import io.netty.handler.logging.LogLevel;
 import io.netty.handler.logging.LoggingHandler;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.client.OzoneClientUtils;
 import org.apache.hadoop.scm.container.common.helpers.Pipeline;
@@ -84,7 +84,7 @@ public class XceiverClient extends XceiverClientSpi {
         .channel(NioSocketChannel.class)
         .handler(new LoggingHandler(LogLevel.INFO))
         .handler(new XceiverClientInitializer(this.pipeline, semaphore));
-    DatanodeID leader = this.pipeline.getLeader();
+    DatanodeDetails leader = this.pipeline.getLeader();
 
     // read port from the data node, on failure use default configured
     // port.
@@ -174,7 +174,7 @@ public class XceiverClient extends XceiverClientSpi {
    * @param datanodes - Datanodes
    */
   @Override
-  public void createPipeline(String pipelineID, List<DatanodeID> datanodes)
+  public void createPipeline(String pipelineID, List<DatanodeDetails> datanodes)
       throws IOException {
     // For stand alone pipeline, there is no notion called setup pipeline.
     return;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java
index e576d0e..4b2d6c4 100644
--- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java
+++ b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java
@@ -154,7 +154,7 @@ public class XceiverClientHandler extends
 
     // Setting the datanode ID in the commands, so that we can distinguish
     // commands when the cluster simulator is running.
-    if(!request.hasDatanodeID()) {
+    if(!request.hasDatanodeUuid()) {
       throw new IllegalArgumentException("Invalid Datanode ID");
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java
index c9b058a..084e3e5 100644
--- a/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java
+++ b/hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.scm;
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.ozone.client.OzoneClientUtils;
 import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 import org.apache.ratis.RatisHelper;
@@ -84,7 +84,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
   /**
    * {@inheritDoc}
    */
-  public void createPipeline(String clusterId, List<DatanodeID> datanodes)
+  public void createPipeline(String clusterId, List<DatanodeDetails> datanodes)
       throws IOException {
     RaftGroup group = RatisHelper.newRaftGroup(datanodes);
     LOG.debug("initializing pipeline:{} with nodes:{}", clusterId,
@@ -102,14 +102,14 @@ public final class XceiverClientRatis extends XceiverClientSpi {
     return HdslProtos.ReplicationType.RATIS;
   }
 
-  private void reinitialize(List<DatanodeID> datanodes, RaftGroup group)
+  private void reinitialize(List<DatanodeDetails> datanodes, RaftGroup group)
       throws IOException {
     if (datanodes.isEmpty()) {
       return;
     }
 
     IOException exception = null;
-    for (DatanodeID d : datanodes) {
+    for (DatanodeDetails d : datanodes) {
       try {
         reinitialize(d, group);
       } catch (IOException ioe) {
@@ -133,7 +133,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
    * @param group    - Raft group
    * @throws IOException - on Failure.
    */
-  private void reinitialize(DatanodeID datanode, RaftGroup group)
+  private void reinitialize(DatanodeDetails datanode, RaftGroup group)
       throws IOException {
     final RaftPeer p = RatisHelper.toRaftPeer(datanode);
     try (RaftClient client = RatisHelper.newRaftClient(rpcType, p)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java
index 68ebcd8..6446618 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java
@@ -255,7 +255,7 @@ public class HdslUtils {
    * @param conf - Configuration
    * @return the path of datanode id as string
    */
-  public static String getDatanodeIDPath(Configuration conf) {
+  public static String getDatanodeIdFilePath(Configuration conf) {
     String dataNodeIDPath = conf.get(ScmConfigKeys.OZONE_SCM_DATANODE_ID);
     if (dataNodeIDPath == null) {
       String metaPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java
new file mode 100644
index 0000000..7049c30
--- /dev/null
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java
@@ -0,0 +1,422 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdsl.protocol;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
+
+import java.util.UUID;
+
+/**
+ * DatanodeDetails contains details about a DataNode, such as:
+ * - UUID of the DataNode.
+ * - IP address and hostname.
+ * - Ports on which the DataNode listens.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class DatanodeDetails implements Comparable<DatanodeDetails> {
+
+  /**
+   * DataNode's unique identifier in the cluster.
+   */
+  private final UUID uuid;
+
+  private String ipAddress;
+  private String hostName;
+  private Integer infoPort;
+  private Integer infoSecurePort;
+  private Integer containerPort;
+  private Integer ratisPort;
+  private Integer ozoneRestPort;
+
+
+  /**
+   * Constructs DatanodeDetails instance. DatanodeDetails.Builder is used
+   * for instantiating DatanodeDetails.
+   * @param uuid DataNode's UUID
+   * @param ipAddress IP Address of this DataNode
+   * @param hostName DataNode's hostname
+   * @param infoPort HTTP Port
+   * @param infoSecurePort HTTPS Port
+   * @param containerPort Container Port
+   * @param ratisPort Ratis Port
+   * @param ozoneRestPort Rest Port
+   */
+  private DatanodeDetails(
+      String uuid, String ipAddress, String hostName, Integer infoPort,
+      Integer infoSecurePort, Integer containerPort, Integer ratisPort,
+      Integer ozoneRestPort) {
+    this.uuid = UUID.fromString(uuid);
+    this.ipAddress = ipAddress;
+    this.hostName = hostName;
+    this.infoPort = infoPort;
+    this.infoSecurePort = infoSecurePort;
+    this.containerPort = containerPort;
+    this.ratisPort = ratisPort;
+    this.ozoneRestPort = ozoneRestPort;
+  }
+
+  /**
+   * Returns the DataNode UUID.
+   *
+   * @return UUID of DataNode
+   */
+  public UUID getUuid() {
+    return uuid;
+  }
+
+  /**
+   * Returns the string representation of DataNode UUID.
+   *
+   * @return UUID of DataNode as a string
+   */
+  public String getUuidString() {
+    return uuid.toString();
+  }
+
+  /**
+   * Sets the IP address of Datanode.
+   *
+   * @param ip IP Address
+   */
+  public void setIpAddress(String ip) {
+    this.ipAddress = ip;
+  }
+
+  /**
+   * Returns IP address of DataNode.
+   *
+   * @return IP address
+   */
+  public String getIpAddress() {
+    return ipAddress;
+  }
+
+  /**
+   * Sets the Datanode hostname.
+   *
+   * @param host hostname
+   */
+  public void setHostName(String host) {
+    this.hostName = host;
+  }
+
+  /**
+   * Returns Hostname of DataNode.
+   *
+   * @return Hostname
+   */
+  public String getHostName() {
+    return hostName;
+  }
+
+  /**
+   * Sets the InfoPort.
+   * @param port InfoPort
+   */
+  public void setInfoPort(int port) {
+    infoPort = port;
+  }
+
+  /**
+   * Returns DataNodes Info Port.
+   *
+   * @return InfoPort
+   */
+  public int getInfoPort() {
+    return infoPort;
+  }
+
+  /**
+   * Sets the InfoSecurePort.
+   *
+   * @param port InfoSecurePort
+   */
+  public void setInfoSecurePort(int port) {
+    infoSecurePort = port;
+  }
+
+  /**
+   * Returns DataNodes Secure Info Port.
+   *
+   * @return InfoSecurePort
+   */
+  public int getInfoSecurePort() {
+    return infoSecurePort;
+  }
+
+  /**
+   * Sets the Container Port.
+   * @param port ContainerPort
+   */
+  public void setContainerPort(int port) {
+    containerPort = port;
+  }
+
+  /**
+   * Returns standalone container Port.
+   *
+   * @return Container Port
+   */
+  public int getContainerPort() {
+    return containerPort;
+  }
+
+  /**
+   * Sets Ratis Port.
+   * @param port RatisPort
+   */
+  public void setRatisPort(int port) {
+    ratisPort = port;
+  }
+
+
+  /**
+   * Returns Ratis Port.
+   * @return Ratis Port
+   */
+  public int getRatisPort() {
+    return ratisPort;
+  }
+
+
+  /**
+   * Sets OzoneRestPort.
+   * @param port OzoneRestPort
+   */
+  public void setOzoneRestPort(int port) {
+    ozoneRestPort = port;
+  }
+
+  /**
+   * Returns Ozone Rest Port.
+   * @return OzoneRestPort
+   */
+  public int getOzoneRestPort() {
+    return ozoneRestPort;
+  }
+
+  /**
+   * Returns a DatanodeDetails from the protocol buffers.
+   *
+   * @param datanodeDetailsProto - protoBuf Message
+   * @return DatanodeDetails
+   */
+  public static DatanodeDetails getFromProtoBuf(
+      HdslProtos.DatanodeDetailsProto datanodeDetailsProto) {
+    DatanodeDetails.Builder builder = newBuilder();
+    builder.setUuid(datanodeDetailsProto.getUuid());
+    if (datanodeDetailsProto.hasIpAddress()) {
+      builder.setIpAddress(datanodeDetailsProto.getIpAddress());
+    }
+    if (datanodeDetailsProto.hasHostName()) {
+      builder.setHostName(datanodeDetailsProto.getHostName());
+    }
+    if (datanodeDetailsProto.hasInfoPort()) {
+      builder.setInfoPort(datanodeDetailsProto.getInfoPort());
+    }
+    if (datanodeDetailsProto.hasInfoSecurePort()) {
+      builder.setInfoSecurePort(datanodeDetailsProto.getInfoSecurePort());
+    }
+    if (datanodeDetailsProto.hasContainerPort()) {
+      builder.setContainerPort(datanodeDetailsProto.getContainerPort());
+    }
+    if (datanodeDetailsProto.hasRatisPort()) {
+      builder.setRatisPort(datanodeDetailsProto.getRatisPort());
+    }
+    if (datanodeDetailsProto.hasOzoneRestPort()) {
+      builder.setOzoneRestPort(datanodeDetailsProto.getOzoneRestPort());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Returns a DatanodeDetailsProto protobuf message built from this object.
+   * @return HdslProtos.DatanodeDetailsProto
+   */
+  public HdslProtos.DatanodeDetailsProto getProtoBufMessage() {
+    HdslProtos.DatanodeDetailsProto.Builder builder =
+        HdslProtos.DatanodeDetailsProto.newBuilder()
+            .setUuid(getUuidString());
+    if (ipAddress != null) {
+      builder.setIpAddress(ipAddress);
+    }
+    if (hostName != null) {
+      builder.setHostName(hostName);
+    }
+    if (infoPort != null) {
+      builder.setInfoPort(infoPort);
+    }
+    if (infoSecurePort != null) {
+      builder.setInfoSecurePort(infoSecurePort);
+    }
+    if (containerPort != null) {
+      builder.setContainerPort(containerPort);
+    }
+    if (ratisPort != null) {
+      builder.setRatisPort(ratisPort);
+    }
+    if (ozoneRestPort != null) {
+      builder.setOzoneRestPort(ozoneRestPort);
+    }
+    return builder.build();
+  }
+
+  @Override
+  public String toString() {
+    return uuid.toString() + "{" +
+        "ip: " +
+        ipAddress +
+        ", host: " +
+        hostName +
+        "}";
+  }
+
+  @Override
+  public int compareTo(DatanodeDetails that) {
+    return this.getUuid().compareTo(that.getUuid());
+  }
+
+  /**
+   * Returns DatanodeDetails.Builder instance.
+   *
+   * @return DatanodeDetails.Builder
+   */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  /**
+   * Builder class for building DatanodeDetails.
+   */
+  public static class Builder {
+    private String id;
+    private String ipAddress;
+    private String hostName;
+    private Integer infoPort;
+    private Integer infoSecurePort;
+    private Integer containerPort;
+    private Integer ratisPort;
+    private Integer ozoneRestPort;
+
+    /**
+     * Sets the DatanodeUuid.
+     *
+     * @param uuid DatanodeUuid
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setUuid(String uuid) {
+      this.id = uuid;
+      return this;
+    }
+
+    /**
+     * Sets the IP address of DataNode.
+     *
+     * @param ip address
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setIpAddress(String ip) {
+      this.ipAddress = ip;
+      return this;
+    }
+
+    /**
+     * Sets the hostname of DataNode.
+     *
+     * @param host hostname
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setHostName(String host) {
+      this.hostName = host;
+      return this;
+    }
+
+    /**
+     * Sets the InfoPort.
+     *
+     * @param port InfoPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setInfoPort(Integer port) {
+      this.infoPort = port;
+      return this;
+    }
+
+    /**
+     * Sets the Secure Info Port.
+     *
+     * @param port InfoSecurePort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setInfoSecurePort(Integer port) {
+      this.infoSecurePort = port;
+      return this;
+    }
+
+    /**
+     * Sets the ContainerPort.
+     *
+     * @param port ContainerPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setContainerPort(Integer port) {
+      this.containerPort = port;
+      return this;
+    }
+
+    /**
+     * Sets the RatisPort.
+     *
+     * @param port RatisPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setRatisPort(Integer port) {
+      this.ratisPort = port;
+      return this;
+    }
+
+    /**
+     * Sets the OzoneRestPort.
+     *
+     * @param port OzoneRestPort
+     * @return DatanodeDetails.Builder
+     */
+    public Builder setOzoneRestPort(Integer port) {
+      this.ozoneRestPort = port;
+      return this;
+    }
+
+    /**
+     * Builds and returns DatanodeDetails instance.
+     *
+     * @return DatanodeDetails
+     */
+    public DatanodeDetails build() {
+      Preconditions.checkNotNull(id);
+      return new DatanodeDetails(id, ipAddress, hostName,
+          infoPort, infoSecurePort, containerPort, ratisPort, ozoneRestPort);
+    }
+
+  }
+
+}
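
For reference, a minimal sketch of the new builder API introduced above (all
values are illustrative, and imports of java.util.UUID and HdslProtos are
assumed):

    DatanodeDetails details = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())   // the only required field
        .setIpAddress("10.0.0.5")
        .setHostName("dn1.example.com")
        .setContainerPort(9866)
        .setRatisPort(9858)
        .build();

    // Round-trip through the wire format added to hdsl.proto below:
    HdslProtos.DatanodeDetailsProto proto = details.getProtoBufMessage();
    DatanodeDetails copy = DatanodeDetails.getFromProtoBuf(proto);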

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java
new file mode 100644
index 0000000..7fd2543
--- /dev/null
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains HDSL protocol related classes.
+ */
+package org.apache.hadoop.hdsl.protocol;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java
index d61e2c0..49817d3 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java
@@ -19,11 +19,11 @@
 package org.apache.hadoop.scm;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
     .ContainerCommandRequestProto;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
 import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 
@@ -118,7 +118,7 @@ public abstract class XceiverClientSpi implements Closeable {
    * @param datanodes - Datanodes
    */
   public abstract void createPipeline(String pipelineID,
-      List<DatanodeID> datanodes) throws IOException;
+      List<DatanodeDetails> datanodes) throws IOException;
 
   /**
    * Returns pipeline Type.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java
index 5a21761..9f2d1f4 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java
@@ -29,8 +29,8 @@ import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -100,7 +100,7 @@ public class Pipeline {
    * @return First Machine.
    */
   @JsonIgnore
-  public DatanodeID getLeader() {
+  public DatanodeDetails getLeader() {
     return pipelineChannel.getDatanodes().get(pipelineChannel.getLeaderID());
   }
 
@@ -120,7 +120,7 @@ public class Pipeline {
    * @return List of Machines.
    */
   @JsonIgnore
-  public List<DatanodeID> getMachines() {
+  public List<DatanodeDetails> getMachines() {
     return new ArrayList<>(pipelineChannel.getDatanodes().values());
   }
 
@@ -131,7 +131,7 @@ public class Pipeline {
    */
   public List<String> getDatanodeHosts() {
     List<String> dataHosts = new ArrayList<>();
-    for (DatanodeID id : pipelineChannel.getDatanodes().values()) {
+    for (DatanodeDetails id : pipelineChannel.getDatanodes().values()) {
       dataHosts.add(id.getHostName());
     }
     return dataHosts;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java
index cf15778..1937968 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java
@@ -19,8 +19,7 @@ package org.apache.hadoop.scm.container.common.helpers;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType;
@@ -36,7 +35,7 @@ public class PipelineChannel {
   @JsonIgnore
   private String leaderID;
   @JsonIgnore
-  private Map<String, DatanodeID> datanodes;
+  private Map<String, DatanodeDetails> datanodes;
   private LifeCycleState lifeCycleState;
   private ReplicationType type;
   private ReplicationFactor factor;
@@ -57,7 +56,7 @@ public class PipelineChannel {
     return leaderID;
   }
 
-  public Map<String, DatanodeID> getDatanodes() {
+  public Map<String, DatanodeDetails> getDatanodes() {
     return datanodes;
   }
 
@@ -77,15 +76,16 @@ public class PipelineChannel {
     return name;
   }
 
-  public void addMember(DatanodeID dataNodeId) {
-    datanodes.put(dataNodeId.getDatanodeUuid(), dataNodeId);
+  public void addMember(DatanodeDetails datanodeDetails) {
+    datanodes.put(datanodeDetails.getUuid().toString(),
+        datanodeDetails);
   }
 
   @JsonIgnore
   public HdslProtos.PipelineChannel getProtobufMessage() {
     HdslProtos.PipelineChannel.Builder builder =
         HdslProtos.PipelineChannel.newBuilder();
-    for (DatanodeID datanode : datanodes.values()) {
+    for (DatanodeDetails datanode : datanodes.values()) {
       builder.addMembers(datanode.getProtoBufMessage());
     }
     builder.setLeaderID(leaderID);
@@ -113,8 +113,9 @@ public class PipelineChannel {
             transportProtos.getFactor(),
             transportProtos.getName());
 
-    for (HdfsProtos.DatanodeIDProto dataID : transportProtos.getMembersList()) {
-      pipelineChannel.addMember(DatanodeID.getFromProtoBuf(dataID));
+    for (HdslProtos.DatanodeDetailsProto dataID :
+        transportProtos.getMembersList()) {
+      pipelineChannel.addMember(DatanodeDetails.getFromProtoBuf(dataID));
     }
     return pipelineChannel;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java
index d1c0079..174f1c1 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java
@@ -82,12 +82,12 @@ public final class ContainerProtocolCalls  {
         .newBuilder()
         .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
         .setKeyData(containerKeyData);
-    String id = xceiverClient.getPipeline().getLeader().getDatanodeUuid();
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request = ContainerCommandRequestProto
         .newBuilder()
         .setCmdType(Type.GetKey)
         .setTraceID(traceID)
-        .setDatanodeID(id)
+        .setDatanodeUuid(id)
         .setGetKey(readKeyRequest)
         .build();
     ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
@@ -109,12 +109,12 @@ public final class ContainerProtocolCalls  {
         .newBuilder()
         .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
         .setKeyData(containerKeyData);
-    String id = xceiverClient.getPipeline().getLeader().getDatanodeUuid();
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request = ContainerCommandRequestProto
         .newBuilder()
         .setCmdType(Type.PutKey)
         .setTraceID(traceID)
-        .setDatanodeID(id)
+        .setDatanodeUuid(id)
         .setPutKey(createKeyRequest)
         .build();
     ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
@@ -139,12 +139,12 @@ public final class ContainerProtocolCalls  {
         .setPipeline(xceiverClient.getPipeline().getProtobufMessage())
         .setKeyName(key)
         .setChunkData(chunk);
-    String id = xceiverClient.getPipeline().getLeader().getDatanodeUuid();
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request = ContainerCommandRequestProto
         .newBuilder()
         .setCmdType(Type.ReadChunk)
         .setTraceID(traceID)
-        .setDatanodeID(id)
+        .setDatanodeUuid(id)
         .setReadChunk(readChunkRequest)
         .build();
     ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
@@ -171,12 +171,12 @@ public final class ContainerProtocolCalls  {
         .setKeyName(key)
         .setChunkData(chunk)
         .setData(data);
-    String id = xceiverClient.getPipeline().getLeader().getDatanodeUuid();
+    String id = xceiverClient.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request = ContainerCommandRequestProto
         .newBuilder()
         .setCmdType(Type.WriteChunk)
         .setTraceID(traceID)
-        .setDatanodeID(id)
+        .setDatanodeUuid(id)
         .setWriteChunk(writeChunkRequest)
         .build();
     ContainerCommandResponseProto response = xceiverClient.sendCommand(request);
@@ -220,12 +220,12 @@ public final class ContainerProtocolCalls  {
             .setKey(createKeyRequest).setData(ByteString.copyFrom(data))
             .build();
 
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request =
         ContainerCommandRequestProto.newBuilder()
             .setCmdType(Type.PutSmallFile)
             .setTraceID(traceID)
-            .setDatanodeID(id)
+            .setDatanodeUuid(id)
             .setPutSmallFile(putSmallFileRequest)
             .build();
     ContainerCommandResponseProto response = client.sendCommand(request);
@@ -249,12 +249,12 @@ public final class ContainerProtocolCalls  {
     createRequest.setPipeline(client.getPipeline().getProtobufMessage());
     createRequest.setContainerData(containerData.build());
 
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto.Builder request =
         ContainerCommandRequestProto.newBuilder();
     request.setCmdType(ContainerProtos.Type.CreateContainer);
     request.setCreateContainer(createRequest);
-    request.setDatanodeID(id);
+    request.setDatanodeUuid(id);
     request.setTraceID(traceID);
     ContainerCommandResponseProto response = client.sendCommand(
         request.build());
@@ -276,13 +276,13 @@ public final class ContainerProtocolCalls  {
     deleteRequest.setName(client.getPipeline().getContainerName());
     deleteRequest.setPipeline(client.getPipeline().getProtobufMessage());
     deleteRequest.setForceDelete(force);
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto.Builder request =
         ContainerCommandRequestProto.newBuilder();
     request.setCmdType(ContainerProtos.Type.DeleteContainer);
     request.setDeleteContainer(deleteRequest);
     request.setTraceID(traceID);
-    request.setDatanodeID(id);
+    request.setDatanodeUuid(id);
     ContainerCommandResponseProto response =
         client.sendCommand(request.build());
     validateContainerResponse(response);
@@ -301,13 +301,13 @@ public final class ContainerProtocolCalls  {
         ContainerProtos.CloseContainerRequestProto.newBuilder();
     closeRequest.setPipeline(client.getPipeline().getProtobufMessage());
 
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto.Builder request =
         ContainerCommandRequestProto.newBuilder();
     request.setCmdType(Type.CloseContainer);
     request.setCloseContainer(closeRequest);
     request.setTraceID(traceID);
-    request.setDatanodeID(id);
+    request.setDatanodeUuid(id);
     ContainerCommandResponseProto response =
         client.sendCommand(request.build());
     validateContainerResponse(response);
@@ -327,12 +327,12 @@ public final class ContainerProtocolCalls  {
         ReadContainerRequestProto.newBuilder();
     readRequest.setName(containerName);
     readRequest.setPipeline(client.getPipeline().getProtobufMessage());
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto.Builder request =
         ContainerCommandRequestProto.newBuilder();
     request.setCmdType(Type.ReadContainer);
     request.setReadContainer(readRequest);
-    request.setDatanodeID(id);
+    request.setDatanodeUuid(id);
     request.setTraceID(traceID);
     ContainerCommandResponseProto response =
         client.sendCommand(request.build());
@@ -365,12 +365,12 @@ public final class ContainerProtocolCalls  {
         GetSmallFileRequestProto
             .newBuilder().setKey(getKey)
             .build();
-    String id = client.getPipeline().getLeader().getDatanodeUuid();
+    String id = client.getPipeline().getLeader().getUuidString();
     ContainerCommandRequestProto request = ContainerCommandRequestProto
         .newBuilder()
         .setCmdType(Type.GetSmallFile)
         .setTraceID(traceID)
-        .setDatanodeID(id)
+        .setDatanodeUuid(id)
         .setGetSmallFile(getSmallFileRequest)
         .build();
     ContainerCommandResponseProto response = client.sendCommand(request);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java b/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java
index de159ab..89a1cb5 100644
--- a/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java
+++ b/hadoop-hdsl/common/src/main/java/org/apache/ratis/RatisHelper.java
@@ -18,7 +18,7 @@
 
 package org.apache.ratis;
 
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 import org.apache.ratis.client.RaftClient;
@@ -47,19 +47,19 @@ import java.util.stream.Collectors;
 public interface RatisHelper {
   Logger LOG = LoggerFactory.getLogger(RatisHelper.class);
 
-  static String toRaftPeerIdString(DatanodeID id) {
-    return id.getIpAddr() + "_" + id.getRatisPort();
+  static String toRaftPeerIdString(DatanodeDetails id) {
+    return id.getUuidString() + "_" + id.getRatisPort();
   }
 
-  static String toRaftPeerAddressString(DatanodeID id) {
-    return id.getIpAddr() + ":" + id.getRatisPort();
+  static String toRaftPeerAddressString(DatanodeDetails id) {
+    return id.getIpAddress() + ":" + id.getRatisPort();
   }
 
-  static RaftPeerId toRaftPeerId(DatanodeID id) {
+  static RaftPeerId toRaftPeerId(DatanodeDetails id) {
     return RaftPeerId.valueOf(toRaftPeerIdString(id));
   }
 
-  static RaftPeer toRaftPeer(DatanodeID id) {
+  static RaftPeer toRaftPeer(DatanodeDetails id) {
     return new RaftPeer(toRaftPeerId(id), toRaftPeerAddressString(id));
   }
 
@@ -67,7 +67,8 @@ public interface RatisHelper {
     return toRaftPeers(pipeline.getMachines());
   }
 
-  static <E extends DatanodeID> List<RaftPeer> toRaftPeers(List<E> datanodes) {
+  static <E extends DatanodeDetails> List<RaftPeer> toRaftPeers(
+      List<E> datanodes) {
     return datanodes.stream().map(RatisHelper::toRaftPeer)
         .collect(Collectors.toList());
   }
@@ -85,7 +86,7 @@ public interface RatisHelper {
     return EMPTY_GROUP;
   }
 
-  static RaftGroup newRaftGroup(List<DatanodeID> datanodes) {
+  static RaftGroup newRaftGroup(List<DatanodeDetails> datanodes) {
     final List<RaftPeer> newPeers = datanodes.stream()
         .map(RatisHelper::toRaftPeer)
         .collect(Collectors.toList());
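
Note the semantic change in toRaftPeerIdString above: the Raft peer id is now
derived from the datanode UUID rather than its IP address, so a datanode
keeps its Ratis identity across IP changes. A sketch of the resulting mapping,
reusing the illustrative DatanodeDetails ("details") built in the sketch after
DatanodeDetails.java earlier:

    RaftPeer peer = RatisHelper.toRaftPeer(details);
    // peer id:      "<uuid>_9858"    via toRaftPeerIdString()
    // peer address: "10.0.0.5:9858"  via toRaftPeerAddressString()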

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto b/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto
index a3bef50..3060ada 100644
--- a/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto
+++ b/hadoop-hdsl/common/src/main/proto/DatanodeContainerProtocol.proto
@@ -164,7 +164,7 @@ message ContainerCommandRequestProto {
   optional   PutSmallFileRequestProto putSmallFile = 16;
   optional   GetSmallFileRequestProto getSmallFile = 17;
   optional   CloseContainerRequestProto closeContainer = 18;
-  required   string datanodeID = 19;
+  required   string datanodeUuid = 19;
 }
 
 message ContainerCommandResponseProto {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/common/src/main/proto/hdsl.proto
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/common/src/main/proto/hdsl.proto b/hadoop-hdsl/common/src/main/proto/hdsl.proto
index 394cd8c..a4baa97 100644
--- a/hadoop-hdsl/common/src/main/proto/hdsl.proto
+++ b/hadoop-hdsl/common/src/main/proto/hdsl.proto
@@ -28,11 +28,21 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 package hadoop.hdsl;
 
-import "hdfs.proto";
+message DatanodeDetailsProto {
+    // TODO: make the port a separate proto message and use it here
+    required string uuid = 1;  // UUID assigned to the Datanode.
+    required string ipAddress = 2;     // IP address
+    required string hostName = 3;      // hostname
+    optional uint32 infoPort = 4;      // datanode http port
+    optional uint32 infoSecurePort = 5 [default = 0]; // datanode https port
+    optional uint32 containerPort = 6 [default = 0];  // Ozone stand_alone protocol
+    optional uint32 ratisPort = 7 [default = 0];      //Ozone ratis port
+    optional uint32 ozoneRestPort = 8 [default = 0];
+}
 
 message PipelineChannel {
     required string leaderID = 1;
-    repeated hadoop.hdfs.DatanodeIDProto members = 2;
+    repeated DatanodeDetailsProto members = 2;
     optional LifeCycleState state = 3 [default = OPEN];
     optional ReplicationType type = 4 [default = STAND_ALONE];
     optional ReplicationFactor factor = 5 [default = ONE];
@@ -82,7 +92,7 @@ enum QueryScope {
 }
 
 message Node {
-    required hadoop.hdfs.DatanodeIDProto nodeID = 1;
+    required DatanodeDetailsProto nodeID = 1;
     repeated NodeState nodeStates = 2;
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java
new file mode 100644
index 0000000..58c1d41
--- /dev/null
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslDatanodeService.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.ozone;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.UUID;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.datanode.DataNodeServicePlugin;
+import org.apache.hadoop.hdsl.HdslUtils;
+import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
+import org.apache.hadoop.ozone.container.common.statemachine
+    .DatanodeStateMachine;
+import org.apache.hadoop.scm.ScmConfigKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Datanode service plugin to start the HDSL container services.
+ */
+public class HdslDatanodeService implements DataNodeServicePlugin {
+
+  private static final Logger LOG = LoggerFactory.getLogger(
+      HdslDatanodeService.class);
+
+  private final boolean isOzoneEnabled;
+
+  private Configuration conf;
+  private DatanodeDetails datanodeDetails;
+  private DatanodeStateMachine datanodeStateMachine;
+
+  public HdslDatanodeService() {
+    try {
+      OzoneConfiguration.activate();
+      this.conf = new OzoneConfiguration();
+      this.isOzoneEnabled = HdslUtils.isHdslEnabled(conf);
+      if (isOzoneEnabled) {
+        this.datanodeDetails = getDatanodeDetails(conf);
+        String hostname = DataNode.getHostName(conf);
+        String ip = InetAddress.getByName(hostname).getHostAddress();
+        this.datanodeDetails.setHostName(hostname);
+        this.datanodeDetails.setIpAddress(ip);
+      }
+    } catch (IOException e) {
+      throw new RuntimeException("Can't start the HDSL datanode plugin", e);
+    }
+  }
+
+  @Override
+  public void start(Object service) {
+    if (isOzoneEnabled) {
+      try {
+        DataNode dataNode = (DataNode) service;
+        datanodeDetails.setInfoPort(dataNode.getInfoPort());
+        datanodeDetails.setInfoSecurePort(dataNode.getInfoSecurePort());
+        datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf);
+        datanodeStateMachine.startDaemon();
+      } catch (IOException e) {
+        throw new RuntimeException("Can't start the HDSL datanode plugin", e);
+      }
+    }
+  }
+
+  /**
+   * Returns the DatanodeDetails of this datanode, read from the local ID
+   * file if it exists, otherwise newly created with a random UUID.
+   *
+   * @return DatanodeDetails
+   */
+  private static DatanodeDetails getDatanodeDetails(Configuration conf)
+      throws IOException {
+    String idFilePath = HdslUtils.getDatanodeIdFilePath(conf);
+    if (idFilePath == null || idFilePath.isEmpty()) {
+      LOG.error("A valid file path is needed for config setting {}",
+          ScmConfigKeys.OZONE_SCM_DATANODE_ID);
+      throw new IllegalArgumentException(ScmConfigKeys.OZONE_SCM_DATANODE_ID +
+          " must be defined. See" +
+          " https://wiki.apache.org/hadoop/Ozone#Configuration"; +
+          " for details on configuring Ozone.");
+    }
+
+    Preconditions.checkNotNull(idFilePath);
+    File idFile = new File(idFilePath);
+    if (idFile.exists()) {
+      return ContainerUtils.readDatanodeDetailsFrom(idFile);
+    } else {
+      // There is no datanode.id file, this might be the first time datanode
+      // is started.
+      String datanodeUuid = UUID.randomUUID().toString();
+      return DatanodeDetails.newBuilder().setUuid(datanodeUuid).build();
+    }
+  }
+
+  /**
+   * Returns the DatanodeDetails if set, null otherwise.
+   *
+   * @return DatanodeDetails
+   */
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
+  @InterfaceAudience.Private
+  public DatanodeStateMachine getDatanodeStateMachine() {
+    return datanodeStateMachine;
+  }
+
+  @Override
+  public void stop() {
+    if (datanodeStateMachine != null) {
+      datanodeStateMachine.stopDaemon();
+    }
+  }
+
+  @Override
+  public void close() throws IOException {
+  }
+}
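
Unlike the HdslServerPlugin it replaces (deleted below), this plugin no longer
waits for the namenode-registration callback: the DatanodeStateMachine is
started from start(Object), which the DataNode invokes for every entry in
dfs.datanode.plugins. A simplified sketch of that activation path (loosely
based on DataNode#startPlugins, shown for context only):

    // Inside DataNode startup, roughly:
    List<ServicePlugin> plugins =
        conf.getInstances("dfs.datanode.plugins", ServicePlugin.class);
    for (ServicePlugin plugin : plugins) {
      plugin.start(this);  // HdslDatanodeService receives the DataNode here
    }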

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslServerPlugin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslServerPlugin.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslServerPlugin.java
deleted file mode 100644
index f8fea43..0000000
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/HdslServerPlugin.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-package org.apache.hadoop.ozone;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.datanode.DataNodeServicePlugin;
-import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
-import org.apache.hadoop.hdsl.HdslUtils;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.ozone.container.common.statemachine
-    .DatanodeStateMachine;
-
-/**
- * Datanode service plugin to start the HDSL container services.
- */
-public class HdslServerPlugin implements DataNodeServicePlugin {
-
-  private DatanodeStateMachine datanodeStateMachine;
-
-  private DataNode dataNode;
-
-  public HdslServerPlugin() {
-    OzoneConfiguration.activate();
-  }
-
-  @Override
-  public void start(Object service) {
-    dataNode = (DataNode) service;
-  }
-
-  @Override
-  public synchronized void onDatanodeSuccessfulNamenodeRegisration(
-      DatanodeRegistration dataNodeId) {
-    if (HdslUtils.isHdslEnabled(dataNode.getConf())) {
-      try {
-        if (datanodeStateMachine==null) {
-          datanodeStateMachine =
-              new DatanodeStateMachine(dataNodeId,
-                  dataNode.getConf());
-          datanodeStateMachine.startDaemon();
-        }
-      } catch (IOException e) {
-        throw new RuntimeException("Can't start the HDSL server plugin", e);
-      }
-
-    }
-  }
-
-  @Override
-  public void stop() {
-    if (datanodeStateMachine != null) {
-      datanodeStateMachine.stopDaemon();
-    }
-  }
-
-  @Override
-  public void close() throws IOException {
-  }
-
-  @InterfaceAudience.Private
-  public DatanodeStateMachine getDatanodeStateMachine() {
-    return datanodeStateMachine;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index 276da1a..6f115be 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -22,9 +22,9 @@ import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.impl.ContainerManagerImpl;
 import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
@@ -39,8 +39,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
 
 import static org.apache.commons.io.FilenameUtils.removeExtension;
 import static org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
@@ -397,15 +395,12 @@ public final class ContainerUtils {
   }
 
   /**
-   * Write datanode ID protobuf messages to an ID file.
-   * The old ID file will be overwritten.
+   * Persists a {@link DatanodeDetails} to a local file.
    *
-   * @param ids A set of {@link DatanodeID}
-   * @param path Local ID file path
-   * @throws IOException When read/write error occurs
+   * @throws IOException when read/write error occurs
    */
-  private synchronized static void writeDatanodeIDs(List<DatanodeID> ids,
-      File path) throws IOException {
+  public synchronized static void writeDatanodeDetailsTo(
+      DatanodeDetails datanodeDetails, File path) throws IOException {
     if (path.exists()) {
       if (!path.delete() || !path.createNewFile()) {
         throw new IOException("Unable to overwrite the datanode ID file.");
@@ -417,61 +412,30 @@ public final class ContainerUtils {
       }
     }
     try (FileOutputStream out = new FileOutputStream(path)) {
-      for (DatanodeID id : ids) {
-        HdfsProtos.DatanodeIDProto dnId = id.getProtoBufMessage();
-        dnId.writeDelimitedTo(out);
-      }
+      HdslProtos.DatanodeDetailsProto proto =
+          datanodeDetails.getProtoBufMessage();
+      proto.writeTo(out);
     }
   }
 
   /**
-   * Persistent a {@link DatanodeID} to a local file.
-   * It reads the IDs first and append a new entry only if the ID is new.
-   * This is to avoid on some dirty environment, this file gets too big.
-   *
-   * @throws IOException when read/write error occurs
-   */
-  public synchronized static void writeDatanodeIDTo(DatanodeID dnID,
-      File path) throws IOException {
-    List<DatanodeID> ids = ContainerUtils.readDatanodeIDsFrom(path);
-    // Only create or overwrite the file
-    // if the ID doesn't exist in the ID file
-    for (DatanodeID id : ids) {
-      if (id.getProtoBufMessage()
-          .equals(dnID.getProtoBufMessage())) {
-        return;
-      }
-    }
-    ids.add(dnID);
-    writeDatanodeIDs(ids, path);
-  }
-
-  /**
-   * Read {@link DatanodeID} from a local ID file and return a set of
-   * datanode IDs. If the ID file doesn't exist, an empty set is returned.
+   * Read {@link DatanodeDetails} from a local ID file.
    *
    * @param path ID file local path
-   * @return A set of {@link DatanodeID}
+   * @return {@link DatanodeDetails}
    * @throws IOException If the id file is malformed or other I/O exceptions
    */
-  public synchronized static List<DatanodeID> readDatanodeIDsFrom(File path)
+  public synchronized static DatanodeDetails readDatanodeDetailsFrom(File path)
       throws IOException {
-    List<DatanodeID> ids = new ArrayList<DatanodeID>();
     if (!path.exists()) {
-      return ids;
+      throw new IOException("Datanode ID file not found.");
     }
     try(FileInputStream in = new FileInputStream(path)) {
-      while(in.available() > 0) {
-        try {
-          HdfsProtos.DatanodeIDProto id =
-              HdfsProtos.DatanodeIDProto.parseDelimitedFrom(in);
-          ids.add(DatanodeID.getFromProtoBuf(id));
-        } catch (IOException e) {
-          throw new IOException("Failed to parse Datanode ID from "
-              + path.getAbsolutePath(), e);
-        }
-      }
+      return DatanodeDetails.getFromProtoBuf(
+          HdslProtos.DatanodeDetailsProto.parseFrom(in));
+    } catch (IOException e) {
+      throw new IOException("Failed to parse DatanodeDetails from "
+          + path.getAbsolutePath(), e);
     }
-    return ids;
   }
 }
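
The rewritten helpers now form a one-record protobuf round trip: the write
path overwrites the file with a single un-delimited DatanodeDetailsProto,
and the read path returns exactly one DatanodeDetails or throws. A minimal
sketch, assuming only this patch's classes (the /tmp path is illustrative;
imports: java.io.File, java.util.UUID, com.google.common.base.Preconditions):

    File idFile = new File("/tmp/datanode.id");
    DatanodeDetails written = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())
        .build();
    ContainerUtils.writeDatanodeDetailsTo(written, idFile);  // overwrites
    DatanodeDetails read = ContainerUtils.readDatanodeDetailsFrom(idFile);
    // Both sides serialize to the same DatanodeDetailsProto message.
    Preconditions.checkState(
        read.getProtoBufMessage().equals(written.getProtoBufMessage()));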

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
index d4b4de5..35a2b64 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/ContainerManagerImpl.java
@@ -23,6 +23,7 @@ import com.google.common.base.Preconditions;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
 import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos;
 import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState;
@@ -30,7 +31,6 @@ import org.apache.hadoop.hdsl.protocol.proto
     .StorageContainerDatanodeProtocolProtos.SCMNodeReport;
 import org.apache.hadoop.hdsl.protocol.proto
     .StorageContainerDatanodeProtocolProtos.SCMStorageReport;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.ozone.container.common.helpers.KeyData;
 import org.apache.hadoop.ozone.container.common.helpers.KeyUtils;
@@ -111,7 +111,7 @@ public class ContainerManagerImpl implements ContainerManager {
   private ChunkManager chunkManager;
   private KeyManager keyManager;
   private Configuration conf;
-  private DatanodeID datanodeID;
+  private DatanodeDetails datanodeDetails;
 
   private ContainerDeletionChoosingPolicy containerDeletionChooser;
   private ContainerReportManager containerReportManager;
@@ -121,24 +121,24 @@ public class ContainerManagerImpl implements ContainerManager {
    *
    * @param config - Configuration.
    * @param containerDirs - List of Metadata Container locations.
-   * @param datanode - Datanode ID.
+   * @param dnDetails - DatanodeDetails.
    * @throws IOException
    */
   @Override
   public void init(
       Configuration config, List<StorageLocation> containerDirs,
-      DatanodeID datanode) throws IOException {
+      DatanodeDetails dnDetails) throws IOException {
     Preconditions.checkNotNull(config, "Config must not be null");
     Preconditions.checkNotNull(containerDirs, "Container directories cannot " +
         "be null");
-    Preconditions.checkNotNull(datanode, "Datanode ID cannot " +
+    Preconditions.checkNotNull(dnDetails, "Datanode Details cannot " +
         "be null");
 
     Preconditions.checkState(containerDirs.size() > 0, "Number of container" +
         " directories must be greater than zero.");
 
     this.conf = config;
-    this.datanodeID = datanode;
+    this.datanodeDetails = dnDetails;
 
     readLock();
     try {
@@ -747,7 +747,7 @@ public class ContainerManagerImpl implements ContainerManager {
 
   }
 
-    @Override
+  @Override
   public void readLockInterruptibly() throws InterruptedException {
     this.lock.readLock().lockInterruptibly();
   }
@@ -883,7 +883,7 @@ public class ContainerManagerImpl implements ContainerManager {
         ContainerReportsRequestProto.newBuilder();
 
     // TODO: support delta based container report
-    crBuilder.setDatanodeID(datanodeID.getProtoBufMessage())
+    crBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage())
         .setType(ContainerReportsRequestProto.reportType.fullReport);
 
     for (ContainerStatus container: containers) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
index 8bead9b..3f19992 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/interfaces/ContainerManager.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.ozone.container.common.interfaces;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdfs.util.RwLock;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
 import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ReportState;
 import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.SCMNodeReport;
@@ -48,11 +48,11 @@ public interface ContainerManager extends RwLock {
    *
    * @param config        - Configuration.
    * @param containerDirs - List of Metadata Container locations.
-   * @param datanodeID - Datanode ID
+   * @param datanodeDetails - DatanodeDetails
    * @throws StorageContainerException
    */
   void init(Configuration config, List<StorageLocation> containerDirs,
-      DatanodeID datanodeID) throws IOException;
+            DatanodeDetails datanodeDetails) throws IOException;
 
   /**
    * Creates a container with the given name.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
index df33ce1..91fa9c3 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.ozone.container.common.statemachine;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.ozone.container.common.statemachine.commandhandler
     .CloseContainerHandler;
 import org.apache.hadoop.ozone.container.common.statemachine.commandhandler.CommandDispatcher;
@@ -54,7 +54,7 @@ public class DatanodeStateMachine implements Closeable {
   private final long heartbeatFrequency;
   private StateContext context;
   private final OzoneContainer container;
-  private DatanodeID datanodeID = null;
+  private DatanodeDetails datanodeDetails;
   private final CommandDispatcher commandDispatcher;
   private long commandsHandled;
   private AtomicLong nextHB;
@@ -64,12 +64,13 @@ public class DatanodeStateMachine implements Closeable {
   /**
   * Constructs a datanode state machine.
    *
-   * @param datanodeID - DatanodeID used to identify a datanode
+   * @param datanodeDetails - DatanodeDetails used to identify a datanode
    * @param conf - Configuration.
    */
-  public DatanodeStateMachine(DatanodeID datanodeID,
+  public DatanodeStateMachine(DatanodeDetails datanodeDetails,
       Configuration conf) throws IOException {
     this.conf = conf;
+    this.datanodeDetails = datanodeDetails;
     executorService = HadoopExecutors.newCachedThreadPool(
                 new ThreadFactoryBuilder().setDaemon(true)
             .setNameFormat("Datanode State Machine Thread - %d").build());
@@ -77,8 +78,8 @@ public class DatanodeStateMachine implements Closeable {
     context = new StateContext(this.conf, DatanodeStates.getInitState(), this);
     heartbeatFrequency = TimeUnit.SECONDS.toMillis(
         getScmHeartbeatInterval(conf));
-    container = new OzoneContainer(datanodeID, new OzoneConfiguration(conf));
-    this.datanodeID = datanodeID;
+    container = new OzoneContainer(this.datanodeDetails,
+        new OzoneConfiguration(conf));
     nextHB = new AtomicLong(Time.monotonicNow());
 
      // When we add new handlers just adding a new handler here should do the
@@ -94,20 +95,17 @@ public class DatanodeStateMachine implements Closeable {
         .build();
   }
 
-  public void setDatanodeID(DatanodeID datanodeID) {
-    this.datanodeID = datanodeID;
-  }
-
   /**
    *
-   * Return DatanodeID if set, return null otherwise.
+   * Returns DatanodeDetails if set, null otherwise.
    *
-   * @return datanodeID
+   * @return DatanodeDetails
    */
-  public DatanodeID getDatanodeID() {
-    return this.datanodeID;
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
   }
 
+
   /**
    * Returns the Connection manager for this state machine.
    *
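
With setDatanodeID() removed, a state machine's identity is now fixed at
construction time. A minimal sketch of the new contract (conf is an
illustrative Configuration carrying the Ozone/SCM settings; the constructor
throws IOException):

    DatanodeDetails dnDetails = DatanodeDetails.newBuilder()
        .setUuid(UUID.randomUUID().toString())
        .build();
    // Identity is handed over up front; there is no post-construction setter.
    DatanodeStateMachine stateMachine =
        new DatanodeStateMachine(dnDetails, conf);
    stateMachine.startDaemon();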

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
index bd23f94..08f47a2 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/InitDatanodeState.java
@@ -18,15 +18,15 @@ package org.apache.hadoop.ozone.container.common.states.datanode;
 
 import com.google.common.base.Strings;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdsl.HdslUtils;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
 import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager;
 import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
 import org.apache.hadoop.ozone.container.common.states.DatanodeState;
-import org.apache.hadoop.scm.ScmConfigKeys;
 
+import org.apache.hadoop.scm.ScmConfigKeys;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -97,32 +97,22 @@ public class InitDatanodeState implements DatanodeState,
     }
 
     // If datanode ID is set, persist it to the ID file.
-    persistContainerDatanodeID();
+    persistContainerDatanodeDetails();
 
     return this.context.getState().getNextState();
   }
 
   /**
-   * Update Ozone container port to the datanode ID,
-   * and persist the ID to a local file.
+   * Persists DatanodeDetails to the datanode.id file.
    */
-  private void persistContainerDatanodeID() throws IOException {
-    String dataNodeIDPath = HdslUtils.getDatanodeIDPath(conf);
-    if (Strings.isNullOrEmpty(dataNodeIDPath)) {
-      LOG.error("A valid file path is needed for config setting {}",
-          ScmConfigKeys.OZONE_SCM_DATANODE_ID);
-      this.context.setState(DatanodeStateMachine.DatanodeStates.SHUTDOWN);
-      return;
-    }
+  private void persistContainerDatanodeDetails() throws IOException {
+    String dataNodeIDPath = HdslUtils.getDatanodeIdFilePath(conf);
     File idPath = new File(dataNodeIDPath);
-    int containerPort = this.context.getContainerPort();
-    int ratisPort = this.context.getRatisPort();
-    DatanodeID datanodeID = this.context.getParent().getDatanodeID();
-    if (datanodeID != null) {
-      datanodeID.setContainerPort(containerPort);
-      datanodeID.setRatisPort(ratisPort);
-      ContainerUtils.writeDatanodeIDTo(datanodeID, idPath);
-      LOG.info("Datanode ID is persisted to {}", dataNodeIDPath);
+    DatanodeDetails datanodeDetails = this.context.getParent()
+        .getDatanodeDetails();
+    if (datanodeDetails != null && !idPath.exists()) {
+      ContainerUtils.writeDatanodeDetailsTo(datanodeDetails, idPath);
+      LOG.info("DatanodeDetails is persisted to {}", dataNodeIDPath);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
index 7153228..7a8c17b 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/datanode/RunningDatanodeState.java
@@ -16,11 +16,7 @@
  */
 package org.apache.hadoop.ozone.container.common.states.datanode;
 
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdsl.HdslUtils;
-import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
 import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine;
 import org.apache.hadoop.ozone.container.common.statemachine.SCMConnectionManager;
@@ -29,15 +25,10 @@ import org.apache.hadoop.ozone.container.common.states.DatanodeState;
 import org.apache.hadoop.ozone.container.common.states.endpoint.HeartbeatEndpointTask;
 import org.apache.hadoop.ozone.container.common.states.endpoint.RegisterEndpointTask;
 import org.apache.hadoop.ozone.container.common.states.endpoint.VersionEndpointTask;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos;
-import org.apache.hadoop.scm.ScmConfigKeys;
 import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.Callable;
@@ -69,75 +60,6 @@ public class RunningDatanodeState implements DatanodeState {
   }
 
   /**
-   * Reads a datanode ID from the persisted information.
-   *
-   * @param idPath - Path to the ID File.
-   * @return DatanodeID
-   * @throws IOException
-   */
-  private StorageContainerDatanodeProtocolProtos.ContainerNodeIDProto
-      readPersistedDatanodeID(Path idPath) throws IOException {
-    Preconditions.checkNotNull(idPath);
-    DatanodeID datanodeID = null;
-    List<DatanodeID> datanodeIDs =
-        ContainerUtils.readDatanodeIDsFrom(idPath.toFile());
-    int containerPort = this.context.getContainerPort();
-    for(DatanodeID dnId : datanodeIDs) {
-      if(dnId.getContainerPort() == containerPort) {
-        datanodeID = dnId;
-        break;
-      }
-    }
-
-    if (datanodeID == null) {
-      throw new IOException("No valid datanode ID found from "
-          + idPath.toFile().getAbsolutePath()
-          + " that matches container port "
-          + containerPort);
-    } else {
-      StorageContainerDatanodeProtocolProtos.ContainerNodeIDProto
-          containerIDProto =
-          StorageContainerDatanodeProtocolProtos
-              .ContainerNodeIDProto
-              .newBuilder()
-              .setDatanodeID(datanodeID.getProtoBufMessage())
-              .build();
-      return containerIDProto;
-    }
-  }
-
-  /**
-   * Returns ContainerNodeIDProto or null in case of Error.
-   *
-   * @return ContainerNodeIDProto
-   */
-  private StorageContainerDatanodeProtocolProtos.ContainerNodeIDProto
-      getContainerNodeID() {
-    String dataNodeIDPath = HdslUtils.getDatanodeIDPath(conf);
-    if (dataNodeIDPath == null || dataNodeIDPath.isEmpty()) {
-      LOG.error("A valid file path is needed for config setting {}",
-          ScmConfigKeys.OZONE_SCM_DATANODE_ID);
-
-      // This is an unrecoverable error.
-      this.context.setState(DatanodeStateMachine.DatanodeStates.SHUTDOWN);
-      return null;
-    }
-    StorageContainerDatanodeProtocolProtos.ContainerNodeIDProto nodeID;
-    // try to read an existing ContainerNode ID.
-    try {
-      nodeID = readPersistedDatanodeID(Paths.get(dataNodeIDPath));
-      if (nodeID != null) {
-        LOG.trace("Read Node ID :", nodeID.getDatanodeID().getDatanodeUuid());
-        return nodeID;
-      }
-    } catch (IOException ex) {
-      LOG.trace("Not able to find container Node ID, creating it.", ex);
-    }
-    this.context.setState(DatanodeStateMachine.DatanodeStates.SHUTDOWN);
-    return null;
-  }
-
-  /**
    * Called before entering this state.
    */
   @Override
@@ -178,13 +100,13 @@ public class RunningDatanodeState implements DatanodeState {
       return  RegisterEndpointTask.newBuilder()
           .setConfig(conf)
           .setEndpointStateMachine(endpoint)
-          .setNodeID(getContainerNodeID())
+          .setDatanodeDetails(context.getParent().getDatanodeDetails())
           .build();
     case HEARTBEAT:
       return HeartbeatEndpointTask.newBuilder()
           .setConfig(conf)
           .setEndpointStateMachine(endpoint)
-          .setNodeID(getContainerNodeID())
+          .setDatanodeDetails(context.getParent().getDatanodeDetails())
           .setContext(context)
           .build();
     case SHUTDOWN:

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
----------------------------------------------------------------------
diff --git a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
index 79496da..29f1f9c 100644
--- a/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
+++ b/hadoop-hdsl/container-service/src/main/java/org/apache/hadoop/ozone/container/common/states/endpoint/HeartbeatEndpointTask.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.ozone.container.common.states.endpoint;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
+import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.DatanodeDetailsProto;
 import org.apache.hadoop.ozone.container.common.helpers
     .DeletedContainerBlocksSummary;
 import org.apache.hadoop.ozone.container.common.statemachine
@@ -30,8 +31,6 @@ import org.apache.hadoop.ozone.container.common.statemachine
 import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
 import org.apache.hadoop.ozone.protocol.commands.CloseContainerCommand;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
-import org.apache.hadoop.hdsl.protocol.proto
-    .StorageContainerDatanodeProtocolProtos.ContainerNodeIDProto;
 import org.apache.hadoop.ozone.protocol.commands.SendContainerCommand;
 import org.apache.hadoop.hdsl.protocol.proto
      .StorageContainerDatanodeProtocolProtos.SCMCommandResponseProto;
@@ -53,7 +52,7 @@ public class HeartbeatEndpointTask
       LoggerFactory.getLogger(HeartbeatEndpointTask.class);
   private final EndpointStateMachine rpcEndpoint;
   private final Configuration conf;
-  private ContainerNodeIDProto containerNodeIDProto;
+  private DatanodeDetailsProto datanodeDetailsProto;
   private StateContext context;
 
   /**
@@ -73,18 +72,18 @@ public class HeartbeatEndpointTask
    *
   * @return DatanodeDetailsProto
    */
-  public ContainerNodeIDProto getContainerNodeIDProto() {
-    return containerNodeIDProto;
+  public DatanodeDetailsProto getDatanodeDetailsProto() {
+    return datanodeDetailsProto;
   }
 
   /**
   * Set datanode details proto.
    *
-   * @param containerNodeIDProto - the node id.
+   * @param datanodeDetailsProto - the datanode details proto.
    */
-  public void setContainerNodeIDProto(ContainerNodeIDProto
-      containerNodeIDProto) {
-    this.containerNodeIDProto = containerNodeIDProto;
+  public void setDatanodeDetailsProto(DatanodeDetailsProto
+      datanodeDetailsProto) {
+    this.datanodeDetailsProto = datanodeDetailsProto;
   }
 
   /**
@@ -97,14 +96,12 @@ public class HeartbeatEndpointTask
   public EndpointStateMachine.EndPointStates call() throws Exception {
     rpcEndpoint.lock();
     try {
-      Preconditions.checkState(this.containerNodeIDProto != null);
-      DatanodeID datanodeID = DatanodeID.getFromProtoBuf(this
-          .containerNodeIDProto.getDatanodeID());
+      Preconditions.checkState(this.datanodeDetailsProto != null);
 
       SCMHeartbeatResponseProto reponse = rpcEndpoint.getEndPoint()
-          .sendHeartbeat(datanodeID, this.context.getNodeReport(),
+          .sendHeartbeat(datanodeDetailsProto, this.context.getNodeReport(),
               this.context.getContainerReportState());
-      processResponse(reponse, datanodeID);
+      processResponse(reponse, datanodeDetailsProto);
       rpcEndpoint.setLastSuccessfulHeartbeat(ZonedDateTime.now());
       rpcEndpoint.zeroMissedCount();
     } catch (IOException ex) {
@@ -129,12 +126,12 @@ public class HeartbeatEndpointTask
    * @param response - SCMHeartbeat response.
    */
   private void processResponse(SCMHeartbeatResponseProto response,
-      final DatanodeID datanodeID) {
+      final DatanodeDetailsProto datanodeDetails) {
     for (SCMCommandResponseProto commandResponseProto : response
         .getCommandsList()) {
       // Verify the response is indeed for this datanode.
       Preconditions.checkState(commandResponseProto.getDatanodeUUID()
-          .equalsIgnoreCase(datanodeID.getDatanodeUuid().toString()),
+          .equalsIgnoreCase(datanodeDetails.getUuid()),
           "Unexpected datanode ID in the response.");
       switch (commandResponseProto.getCmdType()) {
       case sendContainerReport:
@@ -190,7 +187,7 @@ public class HeartbeatEndpointTask
   public static class Builder {
     private EndpointStateMachine endPointStateMachine;
     private Configuration conf;
-    private ContainerNodeIDProto containerNodeIDProto;
+    private DatanodeDetails datanodeDetails;
     private StateContext context;
 
     /**
@@ -224,11 +221,11 @@ public class HeartbeatEndpointTask
     /**
     * Sets the DatanodeDetails.
      *
-     * @param nodeID - NodeID proto
+     * @param dnDetails - DatanodeDetails
      * @return Builder
      */
-    public Builder setNodeID(ContainerNodeIDProto nodeID) {
-      this.containerNodeIDProto = nodeID;
+    public Builder setDatanodeDetails(DatanodeDetails dnDetails) {
+      this.datanodeDetails = dnDetails;
       return this;
     }
 
@@ -255,15 +252,15 @@ public class HeartbeatEndpointTask
             " construct HeartbeatEndpointTask task");
       }
 
-      if (containerNodeIDProto == null) {
-        LOG.error("No nodeID specified.");
+      if (datanodeDetails == null) {
+        LOG.error("No datanode specified.");
         throw new IllegalArgumentException("A vaild Node ID is needed to " +
             "construct HeartbeatEndpointTask task");
       }
 
       HeartbeatEndpointTask task = new HeartbeatEndpointTask(this
           .endPointStateMachine, this.conf, this.context);
-      task.setContainerNodeIDProto(containerNodeIDProto);
+      task.setDatanodeDetailsProto(datanodeDetails.getProtoBufMessage());
       return task;
     }
   }
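
The builder now accepts the domain object and defers protobuf conversion to
build(), which calls getProtoBufMessage() as shown above. A minimal sketch of
constructing the task, assuming conf, endpoint, and context are in scope:

    HeartbeatEndpointTask task = HeartbeatEndpointTask.newBuilder()
        .setConfig(conf)
        .setEndpointStateMachine(endpoint)
        .setContext(context)
        // A DatanodeDetails is supplied here, not a ContainerNodeIDProto;
        // build() converts it to DatanodeDetailsProto.
        .setDatanodeDetails(context.getParent().getDatanodeDetails())
        .build();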

