Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 ab8fb0124 -> 3440ca6e0


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
index 95c65ef..5fc6e04 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestRatisManager.java
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.ozone.container.ozoneimpl;
 
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConsts;
@@ -83,15 +83,17 @@ public class TestRatisManager {
       cluster.waitOzoneReady();
 
       final List<DataNode> datanodes = cluster.getDataNodes();
-      final List<DatanodeID> allIds = datanodes.stream()
-          .map(DataNode::getDatanodeId).collect(Collectors.toList());
+      final List<DatanodeDetails> datanodeDetailsSet = datanodes.stream()
+          .map(MiniOzoneClassicCluster::getDatanodeDetails).collect(
+              Collectors.toList());
 
       //final RatisManager manager = RatisManager.newRatisManager(conf);
 
       final int[] idIndex = {3, 4, 5};
       for (int i = 0; i < idIndex.length; i++) {
         final int previous = i == 0 ? 0 : idIndex[i - 1];
-        final List<DatanodeID> subIds = allIds.subList(previous, idIndex[i]);
+        final List<DatanodeDetails> subIds = datanodeDetailsSet.subList(
+            previous, idIndex[i]);
 
         // Create Ratis cluster
         final String ratisId = "ratis" + i;
@@ -99,7 +101,7 @@ public class TestRatisManager {
         LOG.info("Created RatisCluster " + ratisId);
 
         // check Ratis cluster members
-        //final List<DatanodeID> dns = manager.getMembers(ratisId);
+        //final List<DatanodeDetails> dns = manager.getMembers(ratisId);
         //Assert.assertEquals(subIds, dns);
       }
 
@@ -119,7 +121,7 @@ public class TestRatisManager {
       //manager.updatePipeline(ratisId, allIds);
 
       // check Ratis cluster members
-      //final List<DatanodeID> dns = manager.getMembers(ratisId);
+      //final List<DatanodeDetails> dns = manager.getMembers(ratisId);
       //Assert.assertEquals(allIds, dns);
     } finally {
       cluster.shutdown();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
index 1cf7732..cfe6a39 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/container/server/TestContainerServer.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.ozone.container.server;
 
 import io.netty.channel.embedded.EmbeddedChannel;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
 import 
org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
 import 
org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.RatisTestHelper;
@@ -34,6 +34,7 @@ import 
org.apache.hadoop.ozone.container.common.transport.server.XceiverServer;
 import 
org.apache.hadoop.ozone.container.common.transport.server.XceiverServerHandler;
 import 
org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi;
 import 
org.apache.hadoop.ozone.container.common.transport.server.ratis.XceiverServerRatis;
+import org.apache.hadoop.ozone.scm.TestUtils;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 import org.apache.hadoop.scm.XceiverClient;
 import org.apache.hadoop.scm.XceiverClientRatis;
@@ -95,11 +96,13 @@ public class TestContainerServer {
 
   @Test
   public void testClientServer() throws Exception {
+    DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
     runTestClientServer(1,
         (pipeline, conf) -> conf.setInt(OzoneConfigKeys.DFS_CONTAINER_IPC_PORT,
             pipeline.getLeader().getContainerPort()),
         XceiverClient::new,
-        (dn, conf) -> new XceiverServer(conf, new TestContainerDispatcher()),
+        (dn, conf) -> new XceiverServer(datanodeDetails, conf,
+            new TestContainerDispatcher()),
         (dn, p) -> {});
   }
 
@@ -121,11 +124,10 @@ public class TestContainerServer {
   }
 
   static XceiverServerRatis newXceiverServerRatis(
-      DatanodeID dn, OzoneConfiguration conf) throws IOException {
-    final String id = dn.getXferAddr();
+      DatanodeDetails dn, OzoneConfiguration conf) throws IOException {
     conf.setInt(OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_PORT,
         dn.getRatisPort());
-    final String dir = TEST_DIR + id.replace(':', '_');
+    final String dir = TEST_DIR + dn.getUuid();
     conf.set(OzoneConfigKeys.DFS_CONTAINER_RATIS_DATANODE_STORAGE_DIR, dir);
 
     final ContainerDispatcher dispatcher = new TestContainerDispatcher();
@@ -133,8 +135,8 @@ public class TestContainerServer {
   }
 
   static void initXceiverServerRatis(
-      RpcType rpc, DatanodeID id, Pipeline pipeline) throws IOException {
-    final RaftPeer p = RatisHelper.toRaftPeer(id);
+      RpcType rpc, DatanodeDetails dd, Pipeline pipeline) throws IOException {
+    final RaftPeer p = RatisHelper.toRaftPeer(dd);
     final RaftClient client = RatisHelper.newRaftClient(rpc, p);
     client.reinitialize(RatisHelper.newRaftGroup(pipeline), p.getId());
   }
@@ -154,9 +156,9 @@ public class TestContainerServer {
       BiConsumer<Pipeline, OzoneConfiguration> initConf,
       CheckedBiFunction<Pipeline, OzoneConfiguration, XceiverClientSpi,
           IOException> createClient,
-      CheckedBiFunction<DatanodeID, OzoneConfiguration, XceiverServerSpi,
+      CheckedBiFunction<DatanodeDetails, OzoneConfiguration, XceiverServerSpi,
           IOException> createServer,
-      CheckedBiConsumer<DatanodeID, Pipeline, IOException> initServer)
+      CheckedBiConsumer<DatanodeDetails, Pipeline, IOException> initServer)
       throws Exception {
     final List<XceiverServerSpi> servers = new ArrayList<>();
     XceiverClientSpi client = null;
@@ -167,7 +169,7 @@ public class TestContainerServer {
       final OzoneConfiguration conf = new OzoneConfiguration();
       initConf.accept(pipeline, conf);
 
-      for(DatanodeID dn : pipeline.getMachines()) {
+      for(DatanodeDetails dn : pipeline.getMachines()) {
         final XceiverServerSpi s = createServer.apply(dn, conf);
         servers.add(s);
         s.start();
@@ -208,7 +210,8 @@ public class TestContainerServer {
       Dispatcher dispatcher =
               new Dispatcher(mock(ContainerManager.class), conf);
       dispatcher.init();
-      server = new XceiverServer(conf, dispatcher);
+      DatanodeDetails datanodeDetails = TestUtils.getDatanodeDetails();
+      server = new XceiverServer(datanodeDetails, conf, dispatcher);
       client = new XceiverClient(pipeline, conf);
 
       server.start();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
index aa9615e..5425965 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ksm/TestKeySpaceManagerRestInterface.java
@@ -124,7 +124,7 @@ public class TestKeySpaceManagerRestInterface {
       switch (type) {
       case HTTP:
       case HTTPS:
-        Assert.assertEquals(datanode.getDatanodeId().getOzoneRestPort(),
+        Assert.assertEquals(MiniOzoneClassicCluster.getOzoneRestPort(datanode),
             (int) ports.get(type));
         break;
       default:

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
index 6af0cf8..fdbc15d 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
@@ -117,7 +117,7 @@ public class TestOzoneShell {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    final int port = dataNode.getDatanodeId().getOzoneRestPort();
+    final int port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
    url = String.format("http://localhost:%d", port);
    client = new OzoneRestClient(String.format("http://localhost:%d", port));
     client.setUserAuth(OzoneConsts.OZONE_SIMPLE_HDFS_USER);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
index 3d2ffcb..581011f 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.ozone.scm;
 
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
@@ -214,8 +214,8 @@ public class TestContainerSQLCli {
 
     // verify the sqlite db
     HashMap<String, String> expectedPool = new HashMap<>();
-    for (DatanodeID dnid : nodeManager.getAllNodes()) {
-      expectedPool.put(dnid.getDatanodeUuid(), "DefaultNodePool");
+    for (DatanodeDetails dnid : nodeManager.getAllNodes()) {
+      expectedPool.put(dnid.getUuidString(), "DefaultNodePool");
     }
     Connection conn = connectDB(dbOutPath);
     String sql = "SELECT * FROM nodePool";
@@ -234,7 +234,7 @@ public class TestContainerSQLCli {
   public void testConvertContainerDB() throws Exception {
     String dbOutPath = cluster.getDataDirectory() + "/out_sql.db";
     // TODO : the following will fail due to empty Datanode list, need to fix.
-    //String dnUUID = cluster.getDataNodes().get(0).getDatanodeUuid();
+    //String dnUUID = cluster.getDataNodes().get(0).getUuid();
     String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
     String dbPath = dbRootPath + "/" + SCM_CONTAINER_DB;
     String[] args = {"-p", dbPath, "-o", dbOutPath};
@@ -279,8 +279,8 @@ public class TestContainerSQLCli {
       count += 1;
     }
     // the two containers maybe on the same datanode, maybe not.
-    int expected = pipeline1.getLeader().getDatanodeUuid().equals(
-        pipeline2.getLeader().getDatanodeUuid())? 1 : 2;
+    int expected = pipeline1.getLeader().getUuid().equals(
+        pipeline2.getLeader().getUuid())? 1 : 2;
     assertEquals(expected, count);
     Files.delete(Paths.get(dbOutPath));
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
index 0a7eabf..dbc165c 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMCli.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.ozone.scm;
 
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
@@ -232,7 +232,8 @@ public class TestSCMCli {
   @Test
   public void testInfoContainer() throws Exception {
     // The cluster has one Datanode server.
-    DatanodeID datanodeID = cluster.getDataNodes().get(0).getDatanodeId();
+    DatanodeDetails datanodeDetails = MiniOzoneClassicCluster
+        .getDatanodeDetails(cluster.getDataNodes().get(0));
     String formatStr =
         "Container Name: %s\n" +
         "Container State: %s\n" +
@@ -276,7 +277,7 @@ public class TestSCMCli {
     String openStatus = data.isOpen() ? "OPEN" : "CLOSED";
     String expected = String.format(formatStr, cname, openStatus,
         data.getDBPath(), data.getContainerPath(), "",
-        datanodeID.getHostName(), datanodeID.getHostName());
+        datanodeDetails.getHostName(), datanodeDetails.getHostName());
     assertEquals(expected, out.toString());
 
     out.reset();
@@ -297,7 +298,7 @@ public class TestSCMCli {
     openStatus = data.isOpen() ? "OPEN" : "CLOSED";
     expected = String.format(formatStr, cname, openStatus,
         data.getDBPath(), data.getContainerPath(), "",
-        datanodeID.getHostName(), datanodeID.getHostName());
+        datanodeDetails.getHostName(), datanodeDetails.getHostName());
     assertEquals(expected, out.toString());
 
     out.reset();
@@ -315,7 +316,7 @@ public class TestSCMCli {
     openStatus = data.isOpen() ? "OPEN" : "CLOSED";
     expected = String.format(formatStrWithHash, cname, openStatus,
         data.getHash(), data.getDBPath(), data.getContainerPath(),
-        "", datanodeID.getHostName(), datanodeID.getHostName());
+        "", datanodeDetails.getHostName(), datanodeDetails.getHostName());
     assertEquals(expected, out.toString());
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
index 08e212b..6ce6b05 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestSCMMetrics.java
@@ -26,12 +26,11 @@ import java.util.UUID;
 
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.ozone.container.common.SCMTestUtils;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerReport;
 import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos;
 import 
org.apache.hadoop.hdsl.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsRequestProto;
@@ -79,7 +78,7 @@ public class TestSCMMetrics {
 
       ContainerReportsRequestProto request = createContainerReport(numReport,
           stat, null);
-      String fstDatanodeID = request.getDatanodeID().getDatanodeUuid();
+      String fstDatanodeUuid = request.getDatanodeDetails().getUuid();
       scmManager.sendContainerReport(request);
 
       // verify container stat metrics
@@ -102,7 +101,7 @@ public class TestSCMMetrics {
 
       // add one new report
       request = createContainerReport(1, stat, null);
-      String sndDatanodeID = request.getDatanodeID().getDatanodeUuid();
+      String sndDatanodeUuid = request.getDatanodeDetails().getUuid();
       scmManager.sendContainerReport(request);
 
       scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
@@ -126,11 +125,11 @@ public class TestSCMMetrics {
       // the aggregation.
       stat = new ContainerStat(100, 50, 3, 50, 60, 5, 6);
       scmManager.sendContainerReport(createContainerReport(1, stat,
-          fstDatanodeID));
+          fstDatanodeUuid));
 
       stat = new ContainerStat(1, 1, 1, 1, 1, 1, 1);
       scmManager.sendContainerReport(createContainerReport(1, stat,
-          sndDatanodeID));
+          sndDatanodeUuid));
 
       // the global container metrics value should be updated
       scmMetrics = getMetrics(SCMMetrics.SOURCE_NAME);
@@ -172,7 +171,8 @@ public class TestSCMMetrics {
       StorageContainerManager scmManager = 
cluster.getStorageContainerManager();
 
       DataNode dataNode = cluster.getDataNodes().get(0);
-      String datanodeUuid = dataNode.getDatanodeId().getDatanodeUuid();
+      String datanodeUuid = 
MiniOzoneClassicCluster.getDatanodeDetails(dataNode)
+          .getUuidString();
       ContainerReportsRequestProto request = createContainerReport(numReport,
           stat, datanodeUuid);
       scmManager.sendContainerReport(request);
@@ -236,14 +236,23 @@ public class TestSCMMetrics {
       reportsBuilder.addReports(report.getProtoBufMessage());
     }
 
-    DatanodeID datanodeID;
+    DatanodeDetails datanodeDetails;
     if (datanodeUuid == null) {
-      datanodeID = TestUtils.getDatanodeID();
+      datanodeDetails = TestUtils.getDatanodeDetails();
     } else {
-      datanodeID = new DatanodeID("null", "null", datanodeUuid, 0, 0, 0, 0);
+      datanodeDetails = DatanodeDetails.newBuilder()
+          .setUuid(datanodeUuid)
+          .setIpAddress("127.0.0.1")
+          .setHostName("localhost")
+          .setInfoPort(0)
+          .setInfoSecurePort(0)
+          .setContainerPort(0)
+          .setRatisPort(0)
+          .setOzoneRestPort(0)
+          .build();
     }
 
-    reportsBuilder.setDatanodeID(datanodeID.getProtoBufMessage());
+    reportsBuilder.setDatanodeDetails(datanodeDetails.getProtoBufMessage());
     reportsBuilder.setType(StorageContainerDatanodeProtocolProtos
         .ContainerReportsRequestProto.reportType.fullReport);
     return reportsBuilder.build();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
index eee9fd8..adfe250 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestDistributedOzoneVolumes.java
@@ -67,7 +67,7 @@ public class TestDistributedOzoneVolumes extends 
TestOzoneHelper {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    port = dataNode.getDatanodeId().getOzoneRestPort();
+    port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
index fee49d9..d4c03e3 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestLocalOzoneVolumes.java
@@ -70,7 +70,7 @@ public class TestLocalOzoneVolumes extends TestOzoneHelper {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_LOCAL).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    port = dataNode.getDatanodeId().getOzoneRestPort();
+    port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
index d4c00be..1e42c67 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/TestOzoneWebAccess.java
@@ -79,7 +79,7 @@ public class TestOzoneWebAccess {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_LOCAL).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    port = dataNode.getDatanodeId().getOzoneRestPort();
+    port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
index 86b0167..d088598 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestBuckets.java
@@ -80,7 +80,7 @@ public class TestBuckets {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    final int port = dataNode.getDatanodeId().getOzoneRestPort();
+    final int port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
     ozoneRestClient = new OzoneRestClient(
        String.format("http://localhost:%d", port));
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
index 092acc0..49484f3 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
@@ -111,7 +111,7 @@ public class TestKeys {
     ozoneCluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = ozoneCluster.getDataNodes().get(0);
-    final int port = dataNode.getDatanodeId().getOzoneRestPort();
+    final int port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
     ozoneRestClient = new OzoneRestClient(
        String.format("http://localhost:%d", port));
     currentTime = Time.now();
@@ -282,7 +282,7 @@ public class TestKeys {
     cluster.restartDataNode(datanodeIdx);
     // refresh the datanode endpoint uri after datanode restart
     DataNode dataNode = cluster.getDataNodes().get(datanodeIdx);
-    final int port = dataNode.getDatanodeId().getOzoneRestPort();
+    final int port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
    client.setEndPoint(String.format("http://localhost:%d", port));
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
index d10bbdd..8db67b2 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestOzoneClient.java
@@ -98,7 +98,7 @@ public class TestOzoneClient {
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
     endpoint = String.format("http://localhost:%d",
-        dataNode.getDatanodeId().getOzoneRestPort());
+        MiniOzoneClassicCluster.getOzoneRestPort(dataNode));
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
index e68078b..5d20ca2 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestVolume.java
@@ -90,7 +90,7 @@ public class TestVolume {
     cluster = new MiniOzoneClassicCluster.Builder(conf)
         .setHandlerType(OzoneConsts.OZONE_HANDLER_DISTRIBUTED).build();
     DataNode dataNode = cluster.getDataNodes().get(0);
-    final int port = dataNode.getDatanodeId().getOzoneRestPort();
+    final int port = MiniOzoneClassicCluster.getOzoneRestPort(dataNode);
 
     ozoneRestClient = new OzoneRestClient(
        String.format("http://localhost:%d", port));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/objectstore-service/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-ozone/objectstore-service/pom.xml 
b/hadoop-ozone/objectstore-service/pom.xml
index 9615d2c..de62f31 100644
--- a/hadoop-ozone/objectstore-service/pom.xml
+++ b/hadoop-ozone/objectstore-service/pom.xml
@@ -59,6 +59,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdsl-container-service</artifactId>
+    </dependency>
+
   </dependencies>
   <build>
     <plugins>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreRestPlugin.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreRestPlugin.java
 
b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreRestPlugin.java
index b43bde3..fab4342 100644
--- 
a/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreRestPlugin.java
+++ 
b/hadoop-ozone/objectstore-service/src/main/java/org/apache/hadoop/ozone/web/ObjectStoreRestPlugin.java
@@ -20,15 +20,18 @@ package org.apache.hadoop.ozone.web;
 import java.io.IOException;
 import java.nio.channels.ServerSocketChannel;
 
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeServicePlugin;
 import org.apache.hadoop.hdfs.server.datanode.ObjectStoreHandler;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
+import org.apache.hadoop.ozone.HdslDatanodeService;
 import org.apache.hadoop.ozone.web.netty.ObjectStoreRestHttpServer;
 import org.apache.hadoop.ozone.web.utils.OzoneUtils;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.util.ServicePlugin;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,20 +43,22 @@ public class ObjectStoreRestPlugin implements 
DataNodeServicePlugin {
   private static final Logger LOG =
       LoggerFactory.getLogger(ObjectStoreRestPlugin.class);
 
-  private ObjectStoreHandler handler;
-
-  private volatile int restServicePort = -1;
+  private final boolean isOzoneEnabled;
 
+  private Configuration conf;
+  private ObjectStoreHandler handler;
   private ObjectStoreRestHttpServer objectStoreRestHttpServer;
 
   public ObjectStoreRestPlugin() {
-    OzoneConfiguration.activate();
+      OzoneConfiguration.activate();
+      this.conf = new OzoneConfiguration();
+      this.isOzoneEnabled = OzoneUtils.isOzoneEnabled(conf);
   }
 
   @Override
   public void start(Object service) {
     DataNode dataNode = (DataNode) service;
-    if (OzoneUtils.isOzoneEnabled(dataNode.getConf())) {
+    if (isOzoneEnabled) {
       try {
         handler = new ObjectStoreHandler(dataNode.getConf());
         ServerSocketChannel httpServerChannel =
@@ -66,20 +71,23 @@ public class ObjectStoreRestPlugin implements 
DataNodeServicePlugin {
                 handler);
 
         objectStoreRestHttpServer.start();
+        getDatanodeDetails(dataNode).setOzoneRestPort(
+            objectStoreRestHttpServer.getHttpAddress().getPort());
       } catch (IOException e) {
         throw new RuntimeException("Can't start the Object Store Rest server",
             e);
       }
-      synchronized (this) {
-        try {
-          restServicePort =
-              objectStoreRestHttpServer.getHttpAddress().getPort();
-        } finally {
-          //in case fo waiting for the port information: we can continue.
-          this.notify();
-        }
+    }
+  }
+
+  public static DatanodeDetails getDatanodeDetails(DataNode dataNode) {
+    for (ServicePlugin plugin : dataNode.getPlugins()) {
+      if (plugin instanceof HdslDatanodeService) {
+        return ((HdslDatanodeService) plugin).getDatanodeDetails();
       }
     }
+    throw new RuntimeException("Not able to find HdslDatanodeService in the" +
+        " list of plugins loaded by DataNode.");
   }
 
   @Override
@@ -97,17 +105,4 @@ public class ObjectStoreRestPlugin implements 
DataNodeServicePlugin {
     IOUtils.closeQuietly(handler);
   }
 
-  @Override
-  public void onDatanodeIdCreation(DatanodeID dataNodeId) {
-    synchronized (this) {
-      if (restServicePort == -1) {
-        try {
-          this.wait();
-        } catch (InterruptedException e) {
-          LOG.error("Wait for starting up http server is interrupted.");
-        }
-      }
-    }
-    dataNodeId.setOzoneRestPort(restServicePort);
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
index 3f13109..ef61005 100644
--- 
a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
+++ 
b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/ksm/KeySpaceManager.java
@@ -22,7 +22,6 @@ import com.google.common.base.Preconditions;
 import com.google.protobuf.BlockingService;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
@@ -878,7 +877,7 @@ public final class KeySpaceManager extends 
ServiceRuntimeInfoImpl
         .getNodesList();
 
     for (HdslProtos.Node node : nodes) {
-      HdfsProtos.DatanodeIDProto datanode = node.getNodeID();
+      HdslProtos.DatanodeDetailsProto datanode = node.getNodeID();
 
       ServiceInfo.Builder dnServiceInfoBuilder = ServiceInfo.newBuilder()
           .setNodeType(HdslProtos.NodeType.DATANODE)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3440ca6e/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
index d8e22fe..bd0c2cc 100644
--- 
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
+++ 
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/SQLCLI.java
@@ -28,8 +28,8 @@ import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtilClient;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
 import 
org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.OzoneAclInfo;
 import 
org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.BucketInfo;
 import 
org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.KeyInfo;
@@ -37,7 +37,6 @@ import 
org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.Volu
 import 
org.apache.hadoop.ozone.protocol.proto.KeySpaceManagerProtocolProtos.VolumeList;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
 import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.Pipeline;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
 import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -98,18 +97,16 @@ public class SQLCLI  extends Configured implements Tool {
       "CREATE TABLE datanodeInfo (" +
           "hostName TEXT NOT NULL, " +
           "datanodeUUId TEXT PRIMARY KEY NOT NULL," +
-          "ipAddr TEXT, " +
-          "xferPort INTEGER," +
+          "ipAddress TEXT, " +
           "infoPort INTEGER," +
-          "ipcPort INTEGER," +
           "infoSecurePort INTEGER," +
           "containerPort INTEGER NOT NULL);";
   private static final String INSERT_CONTAINER_INFO =
       "INSERT INTO containerInfo (containerName, leaderUUID) " +
           "VALUES (\"%s\", \"%s\")";
   private static final String INSERT_DATANODE_INFO =
-      "INSERT INTO datanodeInfo (hostname, datanodeUUid, ipAddr, xferPort, " +
-          "infoPort, ipcPort, infoSecurePort, containerPort) " +
+      "INSERT INTO datanodeInfo (hostname, datanodeUUid, ipAddress, " +
+          "infoPort, infoSecurePort, containerPort) " +
-          "VALUES (\"%s\", \"%s\", \"%s\", %d, %d, %d, %d, %d)";
+          "VALUES (\"%s\", \"%s\", \"%s\", %d, %d, %d)";
   private static final String INSERT_CONTAINER_MEMBERS =
       "INSERT INTO containerMembers (containerName, datanodeUUID) " +
@@ -536,23 +533,21 @@ public class SQLCLI  extends Configured implements Tool {
         pipeline.getPipelineChannel().getLeaderID());
     executeSQL(conn, insertContainerInfo);
 
-    for (HdfsProtos.DatanodeIDProto dnID :
+    for (HdslProtos.DatanodeDetailsProto dd :
         pipeline.getPipelineChannel().getMembersList()) {
-      String uuid = dnID.getDatanodeUuid();
+      String uuid = dd.getUuid();
       if (!uuidChecked.contains(uuid)) {
         // we may also not use this checked set, but catch exception instead
         // but this seems a bit cleaner.
-        String ipAddr = dnID.getIpAddr();
-        String hostName = dnID.getHostName();
-        int xferPort = dnID.hasXferPort() ? dnID.getXferPort() : 0;
-        int infoPort = dnID.hasInfoPort() ? dnID.getInfoPort() : 0;
+        String ipAddr = dd.getIpAddress();
+        String hostName = dd.getHostName();
+        int infoPort = dd.hasInfoPort() ? dd.getInfoPort() : 0;
         int securePort =
-            dnID.hasInfoSecurePort() ? dnID.getInfoSecurePort() : 0;
-        int ipcPort = dnID.hasIpcPort() ? dnID.getIpcPort() : 0;
-        int containerPort = dnID.getContainerPort();
+            dd.hasInfoSecurePort() ? dd.getInfoSecurePort() : 0;
+        int containerPort = dd.getContainerPort();
         String insertMachineInfo = String.format(
-            INSERT_DATANODE_INFO, hostName, uuid, ipAddr, xferPort, infoPort,
-            ipcPort, securePort, containerPort);
+            INSERT_DATANODE_INFO, hostName, uuid, ipAddr, infoPort,
+            securePort, containerPort);
         executeSQL(conn, insertMachineInfo);
         uuidChecked.add(uuid);
       }
@@ -633,8 +628,9 @@ public class SQLCLI  extends Configured implements Tool {
       executeSQL(conn, CREATE_DATANODE_INFO);
 
       dbStore.iterate(null, (key, value) -> {
-        DatanodeID nodeId = DatanodeID
-            .getFromProtoBuf(HdfsProtos.DatanodeIDProto.PARSER.parseFrom(key));
+        DatanodeDetails nodeId = DatanodeDetails
+            .getFromProtoBuf(HdslProtos.DatanodeDetailsProto
+                .PARSER.parseFrom(key));
         String blockPool = DFSUtil.bytes2String(value);
         try {
           insertNodePoolDB(conn, blockPool, nodeId);
@@ -647,17 +643,17 @@ public class SQLCLI  extends Configured implements Tool {
   }
 
   private void insertNodePoolDB(Connection conn, String blockPool,
-      DatanodeID datanodeID) throws SQLException {
+      DatanodeDetails datanodeDetails) throws SQLException {
     String insertNodePool = String.format(INSERT_NODE_POOL,
-        datanodeID.getDatanodeUuid(), blockPool);
+        datanodeDetails.getUuidString(), blockPool);
     executeSQL(conn, insertNodePool);
 
-    String insertDatanodeID = String.format(INSERT_DATANODE_INFO,
-        datanodeID.getHostName(), datanodeID.getDatanodeUuid(),
-        datanodeID.getIpAddr(), datanodeID.getXferPort(),
-        datanodeID.getInfoPort(), datanodeID.getIpcPort(),
-        datanodeID.getInfoSecurePort(), datanodeID.getContainerPort());
-    executeSQL(conn, insertDatanodeID);
+    String insertDatanodeDetails = String.format(INSERT_DATANODE_INFO,
+        datanodeDetails.getHostName(), datanodeDetails.getUuid(),
+        datanodeDetails.getIpAddress(), datanodeDetails.getInfoPort(),
+        datanodeDetails.getInfoSecurePort(),
+        datanodeDetails.getContainerPort());
+    executeSQL(conn, insertDatanodeDetails);
   }
 
   /**


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to