This is an automated email from the ASF dual-hosted git repository.

sodonnell pushed a commit to branch HDDS-1880-Decom
in repository https://gitbox.apache.org/repos/asf/hadoop-ozone.git


The following commit(s) were added to refs/heads/HDDS-1880-Decom by this push:
     new f64476c  HDDS-4340. Add Operational State to the datanode list command 
(#1491)
f64476c is described below

commit f64476c06e6268fbec683f6104ff39876adbb90c
Author: Stephen O'Donnell <stephen.odonn...@gmail.com>
AuthorDate: Wed Oct 21 17:10:30 2020 +0100

    HDDS-4340. Add Operational State to the datanode list command (#1491)
---
 hadoop-hdds/tools/pom.xml                          |   6 ++
 .../hdds/scm/cli/datanode/ListInfoSubcommand.java  |  46 ++++++--
 .../scm/cli/datanode/TestListInfoSubcommand.java   | 119 +++++++++++++++++++++
 3 files changed, 164 insertions(+), 7 deletions(-)

diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml
index dfda5a6..9da6f93 100644
--- a/hadoop-hdds/tools/pom.xml
+++ b/hadoop-hdds/tools/pom.xml
@@ -78,6 +78,12 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
       <groupId>org.xerial</groupId>
       <artifactId>sqlite-jdbc</artifactId>
     </dependency>
+      <dependency>
+          <groupId>org.mockito</groupId>
+          <artifactId>mockito-all</artifactId>
+          <version>${mockito1-hadoop.version}</version>
+          <scope>test</scope>
+      </dependency>
 
   </dependencies>
 </project>
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
index 2ce9c87..38ad390 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
@@ -60,30 +60,34 @@ public class ListInfoSubcommand extends ScmSubcommand {
     if (Strings.isNullOrEmpty(ipaddress) && Strings.isNullOrEmpty(uuid)) {
       getAllNodes(scmClient).forEach(this::printDatanodeInfo);
     } else {
-      Stream<DatanodeDetails> allNodes = getAllNodes(scmClient).stream();
+      Stream<DatanodeWithAttributes> allNodes = getAllNodes(scmClient).stream();
       if (!Strings.isNullOrEmpty(ipaddress)) {
-        allNodes = allNodes.filter(p -> p.getIpAddress()
+        allNodes = allNodes.filter(p -> p.getDatanodeDetails().getIpAddress()
             .compareToIgnoreCase(ipaddress) == 0);
       }
       if (!Strings.isNullOrEmpty(uuid)) {
-        allNodes = allNodes.filter(p -> p.getUuid().toString().equals(uuid));
+        allNodes = allNodes.filter(p ->
+            p.getDatanodeDetails().getUuid().toString().equals(uuid));
       }
       allNodes.forEach(this::printDatanodeInfo);
     }
   }
 
-  private List<DatanodeDetails> getAllNodes(ScmClient scmClient)
+  private List<DatanodeWithAttributes> getAllNodes(ScmClient scmClient)
       throws IOException {
     List<HddsProtos.Node> nodes = scmClient.queryNode(null,
         HddsProtos.NodeState.HEALTHY, HddsProtos.QueryScope.CLUSTER, "");
 
     return nodes.stream()
-        .map(p -> DatanodeDetails.getFromProtoBuf(p.getNodeID()))
+        .map(p -> new DatanodeWithAttributes(
+            DatanodeDetails.getFromProtoBuf(p.getNodeID()),
+            p.getNodeOperationalStates(0), p.getNodeStates(0)))
         .collect(Collectors.toList());
   }
 
-  private void printDatanodeInfo(DatanodeDetails datanode) {
+  private void printDatanodeInfo(DatanodeWithAttributes dna) {
     StringBuilder pipelineListInfo = new StringBuilder();
+    DatanodeDetails datanode = dna.getDatanodeDetails();
     int relatedPipelineNum = 0;
     if (!pipelines.isEmpty()) {
       List<Pipeline> relatedPipelines = pipelines.stream().filter(
@@ -108,6 +112,34 @@ public class ListInfoSubcommand extends ScmSubcommand {
     System.out.println("Datanode: " + datanode.getUuid().toString() +
         " (" + datanode.getNetworkLocation() + "/" + datanode.getIpAddress()
         + "/" + datanode.getHostName() + "/" + relatedPipelineNum +
-        " pipelines) \n" + "Related pipelines: \n" + pipelineListInfo);
+        " pipelines)");
+    System.out.println("Operational State: " + dna.getOpState());
+    System.out.println("Related pipelines: \n" + pipelineListInfo);
+  }
+
+  private static class DatanodeWithAttributes {
+    private DatanodeDetails datanodeDetails;
+    private HddsProtos.NodeOperationalState operationalState;
+    private HddsProtos.NodeState healthState;
+
+    DatanodeWithAttributes(DatanodeDetails dn,
+        HddsProtos.NodeOperationalState opState,
+        HddsProtos.NodeState healthState) {
+      this.datanodeDetails = dn;
+      this.operationalState = opState;
+      this.healthState = healthState;
+    }
+
+    public DatanodeDetails getDatanodeDetails() {
+      return datanodeDetails;
+    }
+
+    public HddsProtos.NodeOperationalState getOpState() {
+      return operationalState;
+    }
+
+    public HddsProtos.NodeState getHealthState() {
+      return healthState;
+    }
   }
 }
diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestListInfoSubcommand.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestListInfoSubcommand.java
new file mode 100644
index 0000000..45d4d7b
--- /dev/null
+++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/datanode/TestListInfoSubcommand.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.cli.datanode;
+
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.mockito.Mockito;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.any;
+
+/**
+ * Unit tests to validate that the TestListInfoSubcommand class includes the
+ * correct output when executed against a mock client.
+ */
+public class TestListInfoSubcommand {
+
+  private ListInfoSubcommand cmd;
+  private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+  private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+  private final PrintStream originalOut = System.out;
+  private final PrintStream originalErr = System.err;
+
+  @Before
+  public void setup() {
+    cmd = new ListInfoSubcommand();
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+  }
+
+  @After
+  public void tearDown() {
+    System.setOut(originalOut);
+    System.setErr(originalErr);
+  }
+
+  @Test
+  public void testDataNodeOperationalStateIncludedInOutput() throws Exception {
+    ScmClient scmClient = mock(ScmClient.class);
+    Mockito.when(scmClient.queryNode(any(HddsProtos.NodeOperationalState.class),
+        any(HddsProtos.NodeState.class), any(HddsProtos.QueryScope.class),
+        Mockito.anyString()))
+        .thenAnswer(invocation -> getNodeDetails());
+    Mockito.when(scmClient.listPipelines())
+        .thenReturn(new ArrayList<>());
+
+    cmd.execute(scmClient);
+
+    // The output should contain a string like:
+    // <other lines>
+    // Operational State: <STATE>
+    // <other lines>
+    Pattern p = Pattern.compile(
+        "^Operational State:\\s+IN_SERVICE$", Pattern.MULTILINE);
+    Matcher m = p.matcher(outContent.toString());
+    assertTrue(m.find());
+    // Should also have a node with the state DECOMMISSIONING
+    p = Pattern.compile(
+        "^Operational State:\\s+DECOMMISSIONING$", Pattern.MULTILINE);
+    m = p.matcher(outContent.toString());
+    assertTrue(m.find());
+  }
+
+  private List<HddsProtos.Node> getNodeDetails() {
+    List<HddsProtos.Node> nodes = new ArrayList<>();
+
+    for (int i=0; i<2; i++) {
+      HddsProtos.DatanodeDetailsProto.Builder dnd =
+          HddsProtos.DatanodeDetailsProto.newBuilder();
+      dnd.setHostName("host" + i);
+      dnd.setIpAddress("1.2.3." + (i + 1));
+      dnd.setNetworkLocation("/default");
+      dnd.setNetworkName("host" + i);
+      dnd.addPorts(HddsProtos.Port.newBuilder()
+          .setName("ratis").setValue(5678).build());
+      dnd.setUuid(UUID.randomUUID().toString());
+
+      HddsProtos.Node.Builder builder  = HddsProtos.Node.newBuilder();
+      if (i == 0) {
+        builder.addNodeOperationalStates(
+            HddsProtos.NodeOperationalState.IN_SERVICE);
+      } else {
+        builder.addNodeOperationalStates(
+            HddsProtos.NodeOperationalState.DECOMMISSIONING);
+      }
+      builder.addNodeStates(HddsProtos.NodeState.HEALTHY);
+      builder.setNodeID(dnd.build());
+      nodes.add(builder.build());
+    }
+    return nodes;
+  }
+}
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: ozone-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: ozone-commits-h...@hadoop.apache.org

Reply via email to