This is an automated email from the ASF dual-hosted git repository.

xyao pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new c183bd8  HDDS-1527. HDDS Datanode start fails due to datanode.id file read error. Contributed by Siddharth Wagle.
c183bd8 is described below

commit c183bd8e2009c41ca9bdde964ec7e428dacc0c03
Author: Siddharth <swa...@hortonworks.com>
AuthorDate: Thu May 16 15:13:10 2019 -0700

    HDDS-1527. HDDS Datanode start fails due to datanode.id file read error. Contributed by Siddharth Wagle.
    
    This closes #822.
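    
    In outline, the fix makes readDatanodeDetailsFrom() try the yaml layout
    first and, when that parse fails, fall back to the legacy protobuf
    layout that older datanodes wrote, so an upgraded datanode can still
    start. Below is a minimal sketch of that fallback pattern, not the
    committed code: the class and method names are hypothetical, it assumes
    the same package as ContainerUtils (where DatanodeIdYaml lives), and
    only DatanodeIdYaml, DatanodeDetails and HddsProtos come from the patch
    itself.
    
        import java.io.File;
        import java.io.FileInputStream;
        import java.io.IOException;
    
        import org.apache.hadoop.hdds.protocol.DatanodeDetails;
        import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    
        /** Hypothetical sketch of the yaml-then-protobuf fallback. */
        final class DatanodeIdFallbackSketch {
          private DatanodeIdFallbackSketch() { }
    
          static DatanodeDetails read(File path) throws IOException {
            try {
              // Current format: datanode.id persisted as yaml.
              return DatanodeIdYaml.readDatanodeIdFile(path);
            } catch (IOException yamlError) {
              // Legacy format: a raw DatanodeDetailsProto written by
              // older releases. Try it before giving up.
              try (FileInputStream in = new FileInputStream(path)) {
                return DatanodeDetails.getFromProtoBuf(
                    HddsProtos.DatanodeDetailsProto.parseFrom(in));
              }
            }
          }
        }
    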
---
 .../container/common/helpers/ContainerUtils.java   | 19 ++++++--
 .../apache/hadoop/ozone/TestMiniOzoneCluster.java  | 54 +++++++++++++---------
 2 files changed, 49 insertions(+), 24 deletions(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index 770435e..ff6dec8 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -24,6 +24,7 @@ import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Res
 import static org.apache.hadoop.ozone.container.common.impl.ContainerData.CHARSET_ENCODING;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.file.Paths;
 import java.security.MessageDigest;
@@ -35,6 +36,7 @@ import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.impl.ContainerData;
@@ -51,6 +53,9 @@ import com.google.common.base.Preconditions;
  */
 public final class ContainerUtils {
 
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ContainerUtils.class);
+
   private ContainerUtils() {
     //never constructed.
   }
@@ -198,7 +203,7 @@ public final class ContainerUtils {
         throw new IOException("Unable to overwrite the datanode ID file.");
       }
     } else {
-      if(!path.getParentFile().exists() &&
+      if (!path.getParentFile().exists() &&
           !path.getParentFile().mkdirs()) {
         throw new IOException("Unable to create datanode ID directories.");
       }
@@ -221,8 +226,16 @@ public final class ContainerUtils {
     try {
       return DatanodeIdYaml.readDatanodeIdFile(path);
     } catch (IOException e) {
-      throw new IOException("Failed to parse DatanodeDetails from "
-          + path.getAbsolutePath(), e);
+      LOG.warn("Error loading DatanodeDetails yaml from " +
+          path.getAbsolutePath(), e);
+      // Try to load as protobuf before giving up
+      try (FileInputStream in = new FileInputStream(path)) {
+        return DatanodeDetails.getFromProtoBuf(
+            HddsProtos.DatanodeDetailsProto.parseFrom(in));
+      } catch (IOException io) {
+        throw new IOException("Failed to parse DatanodeDetails from "
+            + path.getAbsolutePath(), io);
+      }
     }
   }
 
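The test change below exercises this upgrade path end to end: it writes a
datanode.id file in the legacy protobuf layout, then reads it back through
ContainerUtils.readDatanodeDetailsFrom(). Reduced to its core it looks
roughly like this sketch, where `details` stands for any populated
DatanodeDetails instance and the file location is arbitrary:

    File idFile = new File(tmpDir, "datanode.id");
    try (FileOutputStream out = new FileOutputStream(idFile)) {
      // Persist the way pre-yaml releases did: raw proto bytes.
      details.getProtoBufMessage().writeTo(out);
    }
    // Succeeds despite the old layout, thanks to the fallback above.
    DatanodeDetails reloaded = ContainerUtils.readDatanodeDetailsFrom(idFile);
    assertEquals(details, reloaded);
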
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java
index e8db976..f3a5d2c 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestMiniOzoneCluster.java
@@ -18,24 +18,38 @@
 
 package org.apache.hadoop.ozone;
 
+import static org.apache.hadoop.hdds.protocol.DatanodeDetails.Port;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.ozone.OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+
 import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.TestUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
+import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
 import org.apache.hadoop.ozone.container.common.SCMTestUtils;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
-import org.apache.hadoop.ozone.container.common.statemachine
-    .DatanodeStateMachine;
-import org.apache.hadoop.ozone.container.common.statemachine
-    .EndpointStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
+import org.apache.hadoop.ozone.container.common.statemachine.EndpointStateMachine;
 import org.apache.hadoop.ozone.container.ozoneimpl.TestOzoneContainer;
-import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
-import org.apache.hadoop.hdds.scm.TestUtils;
-import org.apache.hadoop.hdds.scm.XceiverClientGrpc;
-import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.test.TestGenericTestUtils;
 import org.junit.AfterClass;
@@ -44,19 +58,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.yaml.snakeyaml.Yaml;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-
-import static org.apache.hadoop.hdds.protocol.DatanodeDetails.Port;
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
-import static org.apache.hadoop.ozone.OzoneConfigKeys.DFS_CONTAINER_RATIS_IPC_RANDOM_PORT;
-import static org.junit.Assert.*;
-
 /**
  * Test cases for mini ozone cluster.
  */
@@ -167,6 +168,17 @@ public class TestMiniOzoneCluster {
     } catch (Exception e) {
       assertTrue(e instanceof IOException);
     }
+
+    // Test upgrade scenario - protobuf file instead of yaml
+    File protoFile = new File(WRITE_TMP, "valid-proto.id");
+    try (FileOutputStream out = new FileOutputStream(protoFile)) {
+      HddsProtos.DatanodeDetailsProto proto = id1.getProtoBufMessage();
+      proto.writeTo(out);
+    }
+    validId = ContainerUtils.readDatanodeDetailsFrom(protoFile);
+    assertEquals(validId.getCertSerialId(), certSerialId);
+    assertEquals(id1, validId);
+    assertEquals(id1.getProtoBufMessage(), validId.getProtoBufMessage());
   }
 
   @Test

