http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java
index 459dce7..ded50f9 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java
@@ -28,7 +28,9 @@ import java.security.KeyPairGenerator;
 import java.security.NoSuchAlgorithmException;
 import java.security.NoSuchProviderException;
 
-/** A class to generate Key Pair for use with Certificates. */
+/**
+ * A class to generate Key Pair for use with Certificates.
+ */
 public class HDDSKeyGenerator {
   private static final Logger LOG =
       LoggerFactory.getLogger(HDDSKeyGenerator.class);
@@ -44,7 +46,17 @@ public class HDDSKeyGenerator {
   }
 
   /**
+   * Constructor that takes a SecurityConfig as the Argument.
+   *
+   * @param config - SecurityConfig
+   */
+  public HDDSKeyGenerator(SecurityConfig config) {
+    this.securityConfig = config;
+  }
+
+  /**
    * Returns the Security config used for this object.
+   *
    * @return SecurityConfig
    */
   public SecurityConfig getSecurityConfig() {
@@ -55,10 +67,10 @@ public class HDDSKeyGenerator {
    * Use Config to generate key.
    *
    * @return KeyPair
-   * @throws NoSuchProviderException - On Error, due to missing Java
-   * dependencies.
+   * @throws NoSuchProviderException  - On Error, due to missing Java
+   *                                  dependencies.
    * @throws NoSuchAlgorithmException - On Error,  due to missing Java
-   * dependencies.
+   *                                  dependencies.
    */
   public KeyPair generateKey() throws NoSuchProviderException,
       NoSuchAlgorithmException {
@@ -71,10 +83,10 @@ public class HDDSKeyGenerator {
    *
    * @param size - int, valid key sizes.
    * @return KeyPair
-   * @throws NoSuchProviderException - On Error, due to missing Java
-   * dependencies.
+   * @throws NoSuchProviderException  - On Error, due to missing Java
+   *                                  dependencies.
    * @throws NoSuchAlgorithmException - On Error,  due to missing Java
-   * dependencies.
+   *                                  dependencies.
    */
   public KeyPair generateKey(int size) throws
       NoSuchProviderException, NoSuchAlgorithmException {
@@ -89,10 +101,10 @@ public class HDDSKeyGenerator {
    * @param algorithm - Algorithm to use
    * @param provider - Security provider.
    * @return KeyPair.
-   * @throws NoSuchProviderException - On Error, due to missing Java
-   * dependencies.
+   * @throws NoSuchProviderException  - On Error, due to missing Java
+   *                                  dependencies.
    * @throws NoSuchAlgorithmException - On Error,  due to missing Java
-   * dependencies.
+   *                                  dependencies.
    */
   public KeyPair generateKey(int size, String algorithm, String provider)
       throws NoSuchProviderException, NoSuchAlgorithmException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyPEMWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyPEMWriter.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyPEMWriter.java
deleted file mode 100644
index 95be1c4..0000000
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyPEMWriter.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-package org.apache.hadoop.hdds.security.x509.keys;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import org.apache.commons.io.output.FileWriterWithEncoding;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdds.security.x509.SecurityConfig;
-import org.bouncycastle.util.io.pem.PemObject;
-import org.bouncycastle.util.io.pem.PemWriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.attribute.PosixFilePermission;
-import java.security.KeyPair;
-import java.util.Set;
-import java.util.function.Supplier;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
-import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
-import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
-
-/**
- * We store all Key material in good old PEM files.
- * This helps in avoiding dealing will persistent
- * Java KeyStore issues. Also when debugging,
- * general tools like OpenSSL can be used to read and
- * decode these files.
- */
-public class HDDSKeyPEMWriter {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(HDDSKeyPEMWriter.class);
-  private final Path location;
-  private final SecurityConfig securityConfig;
-  private Set<PosixFilePermission> permissionSet =
-      Stream.of(OWNER_READ, OWNER_WRITE,  OWNER_EXECUTE)
-          .collect(Collectors.toSet());
-  private Supplier<Boolean> isPosixFileSystem;
-  public final static String PRIVATE_KEY = "PRIVATE KEY";
-  public final static String PUBLIC_KEY = "PUBLIC KEY";
-  public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
-  /*
-    Creates an HDDS Key Writer.
-
-    @param configuration - Configuration
-   */
-  public HDDSKeyPEMWriter(Configuration configuration) throws IOException {
-    Preconditions.checkNotNull(configuration, "Config cannot be null");
-    this.securityConfig = new SecurityConfig(configuration);
-    isPosixFileSystem = HDDSKeyPEMWriter::isPosix;
-    this.location = securityConfig.getKeyLocation();
-  }
-
-  /**
-   * Checks if File System supports posix style security permissions.
-   *
-   * @return True if it supports posix.
-   */
-  private static Boolean isPosix() {
-    return FileSystems.getDefault().supportedFileAttributeViews()
-        .contains("posix");
-  }
-
-  /**
-   * Returns the Permission set.
-   * @return Set
-   */
-  @VisibleForTesting
-  public Set<PosixFilePermission> getPermissionSet() {
-    return permissionSet;
-  }
-
-  /**
-   * Returns the Security config used for this object.
-   * @return SecurityConfig
-   */
-  public SecurityConfig getSecurityConfig() {
-    return securityConfig;
-  }
-
-  /**
-   * This function is used only for testing.
-   *
-   * @param isPosixFileSystem - Sets a boolean function for mimicking
-   * files systems that are not posix.
-   */
-  @VisibleForTesting
-  public void setIsPosixFileSystem(Supplier<Boolean> isPosixFileSystem) {
-    this.isPosixFileSystem = isPosixFileSystem;
-  }
-
-  /**
-   * Writes a given key using the default config options.
-   *
-   * @param keyPair - Key Pair to write to file.
-   * @throws IOException - On I/O failure.
-   */
-  public void writeKey(KeyPair keyPair) throws IOException {
-    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
-        securityConfig.getPublicKeyFileName(), false);
-  }
-
-  /**
-   * Writes a given key using default config options.
-   *
-   * @param keyPair - Key pair to write
-   * @param overwrite - Overwrites the keys if they already exist.
-   * @throws IOException - On I/O failure.
-   */
-  public void writeKey(KeyPair keyPair, boolean overwrite) throws IOException {
-    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
-        securityConfig.getPublicKeyFileName(), overwrite);
-  }
-
-  /**
-   * Writes a given key using default config options.
-   *
-   * @param basePath - The location to write to, override the config values.
-   * @param keyPair - Key pair to write
-   * @param overwrite - Overwrites the keys if they already exist.
-   * @throws IOException - On I/O failure.
-   */
-  public void writeKey(Path basePath, KeyPair keyPair, boolean overwrite)
-      throws IOException {
-    writeKey(basePath, keyPair, securityConfig.getPrivateKeyFileName(),
-        securityConfig.getPublicKeyFileName(), overwrite);
-  }
-
-  /**
-   * Helper function that actually writes data to the files.
-   *
-   * @param basePath - base path to write key
-   * @param keyPair - Key pair to write to file.
-   * @param privateKeyFileName - private key file name.
-   * @param publicKeyFileName - public key file name.
-   * @param force - forces overwriting the keys.
-   * @throws IOException - On I/O failure.
-   */
-  private synchronized void writeKey(Path basePath, KeyPair keyPair,
-      String privateKeyFileName, String publicKeyFileName, boolean force)
-      throws IOException {
-    checkPreconditions(basePath);
-
-    File privateKeyFile =
-        Paths.get(location.toString(), privateKeyFileName).toFile();
-    File publicKeyFile =
-        Paths.get(location.toString(), publicKeyFileName).toFile();
-    checkKeyFile(privateKeyFile, force, publicKeyFile);
-
-    try (PemWriter privateKeyWriter = new PemWriter(new
-        FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
-      privateKeyWriter.writeObject(
-          new PemObject(PRIVATE_KEY, keyPair.getPrivate().getEncoded()));
-    }
-
-    try (PemWriter publicKeyWriter = new PemWriter(new
-        FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
-      publicKeyWriter.writeObject(
-          new PemObject(PUBLIC_KEY, keyPair.getPublic().getEncoded()));
-    }
-    Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
-    Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
-  }
-
-  /**
-   * Checks if private and public key file already exists. Throws IOException
-   * if file exists and force flag is set to false, else will delete the
-   * existing file.
-   *
-   * @param privateKeyFile - Private key file.
-   * @param force - forces overwriting the keys.
-   * @param publicKeyFile - public key file.
-   * @throws IOException - On I/O failure.
-   */
-  private void checkKeyFile(File privateKeyFile, boolean force,
-      File publicKeyFile) throws IOException {
-    if (privateKeyFile.exists() && force) {
-      if (!privateKeyFile.delete()) {
-        throw new IOException("Unable to delete private key file.");
-      }
-    }
-
-    if (publicKeyFile.exists() && force) {
-      if (!publicKeyFile.delete()) {
-        throw new IOException("Unable to delete public key file.");
-      }
-    }
-
-    if (privateKeyFile.exists()) {
-      throw new IOException("Private Key file already exists.");
-    }
-
-    if (publicKeyFile.exists()) {
-      throw new IOException("Public Key file already exists.");
-    }
-  }
-
-  /**
-   * Checks if base path exists and sets file permissions.
-   *
-   * @param basePath - base path to write key
-   * @throws IOException - On I/O failure.
-   */
-  private void checkPreconditions(Path basePath) throws IOException {
-    Preconditions.checkNotNull(basePath, "Base path cannot be null");
-    if (!isPosixFileSystem.get()) {
-      LOG.error("Keys cannot be stored securely without POSIX file system "
-          + "support for now.");
-      throw new IOException("Unsupported File System for pem file.");
-    }
-
-    if (Files.exists(basePath)) {
-      // Not the end of the world if we reset the permissions on an existing
-      // directory.
-      Files.setPosixFilePermissions(basePath, permissionSet);
-    } else {
-      boolean success = basePath.toFile().mkdirs();
-      if (!success) {
-        LOG.error("Unable to create the directory for the "
-            + "location. Location: {}", basePath);
-        throw new IOException("Unable to create the directory for the "
-            + "location. Location:" + basePath);
-      }
-      Files.setPosixFilePermissions(basePath, permissionSet);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
new file mode 100644
index 0000000..1d45ef1
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
@@ -0,0 +1,337 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.hadoop.hdds.security.x509.keys;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.output.FileWriterWithEncoding;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.bouncycastle.util.io.pem.PemObject;
+import org.bouncycastle.util.io.pem.PemReader;
+import org.bouncycastle.util.io.pem.PemWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.StringReader;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.security.KeyFactory;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.util.Set;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.nio.file.attribute.PosixFilePermission.OWNER_EXECUTE;
+import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
+import static java.nio.file.attribute.PosixFilePermission.OWNER_WRITE;
+
+/**
+ * We store all Key material in good old PEM files. This helps in avoiding
+ * dealing will persistent Java KeyStore issues. Also when debugging, general
+ * tools like OpenSSL can be used to read and decode these files.
+ */
+public class KeyCodec {
+  public final static String PRIVATE_KEY = "PRIVATE KEY";
+  public final static String PUBLIC_KEY = "PUBLIC KEY";
+  public final static Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
+  private final static  Logger LOG =
+      LoggerFactory.getLogger(KeyCodec.class);
+  private final Path location;
+  private final SecurityConfig securityConfig;
+  private Set<PosixFilePermission> permissionSet =
+      Stream.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE)
+          .collect(Collectors.toSet());
+  private Supplier<Boolean> isPosixFileSystem;
+
+  /**
+   * Creates an KeyCodec.
+   *
+   * @param config - Security Config.
+   * @param component - Component String.
+   */
+  public KeyCodec(SecurityConfig config, String component) {
+    this.securityConfig = config;
+    isPosixFileSystem = KeyCodec::isPosix;
+    this.location = securityConfig.getKeyLocation(component);
+  }
+
+  /**
+   * Creates an HDDS Key Writer.
+   *
+   * @param configuration - Configuration
+   */
+  public KeyCodec(Configuration configuration) {
+    Preconditions.checkNotNull(configuration, "Config cannot be null");
+    this.securityConfig = new SecurityConfig(configuration);
+    isPosixFileSystem = KeyCodec::isPosix;
+    this.location = securityConfig.getKeyLocation();
+  }
+
+  /**
+   * Checks if File System supports posix style security permissions.
+   *
+   * @return True if it supports posix.
+   */
+  private static Boolean isPosix() {
+    return FileSystems.getDefault().supportedFileAttributeViews()
+        .contains("posix");
+  }
+
+  /**
+   * Returns the Permission set.
+   *
+   * @return Set
+   */
+  @VisibleForTesting
+  public Set<PosixFilePermission> getPermissionSet() {
+    return permissionSet;
+  }
+
+  /**
+   * Returns the Security config used for this object.
+   *
+   * @return SecurityConfig
+   */
+  public SecurityConfig getSecurityConfig() {
+    return securityConfig;
+  }
+
+  /**
+   * This function is used only for testing.
+   *
+   * @param isPosixFileSystem - Sets a boolean function for mimicking files
+   * systems that are not posix.
+   */
+  @VisibleForTesting
+  public void setIsPosixFileSystem(Supplier<Boolean> isPosixFileSystem) {
+    this.isPosixFileSystem = isPosixFileSystem;
+  }
+
+  /**
+   * Writes a given key using the default config options.
+   *
+   * @param keyPair - Key Pair to write to file.
+   * @throws IOException - On I/O failure.
+   */
+  public void writeKey(KeyPair keyPair) throws IOException {
+    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
+        securityConfig.getPublicKeyFileName(), false);
+  }
+
+  /**
+   * Writes a given key using default config options.
+   *
+   * @param keyPair - Key pair to write
+   * @param overwrite - Overwrites the keys if they already exist.
+   * @throws IOException - On I/O failure.
+   */
+  public void writeKey(KeyPair keyPair, boolean overwrite) throws IOException {
+    writeKey(location, keyPair, securityConfig.getPrivateKeyFileName(),
+        securityConfig.getPublicKeyFileName(), overwrite);
+  }
+
+  /**
+   * Writes a given key using default config options.
+   *
+   * @param basePath - The location to write to, override the config values.
+   * @param keyPair - Key pair to write
+   * @param overwrite - Overwrites the keys if they already exist.
+   * @throws IOException - On I/O failure.
+   */
+  public void writeKey(Path basePath, KeyPair keyPair, boolean overwrite)
+      throws IOException {
+    writeKey(basePath, keyPair, securityConfig.getPrivateKeyFileName(),
+        securityConfig.getPublicKeyFileName(), overwrite);
+  }
+
+  /**
+   * Reads a Private Key from the PEM Encoded Store.
+   *
+   * @param basePath - Base Path, Directory where the Key is stored.
+   * @param keyFileName - File Name of the private key
+   * @return PrivateKey Object.
+   * @throws IOException - on Error.
+   */
+  private PKCS8EncodedKeySpec readKey(Path basePath, String keyFileName)
+      throws IOException {
+    File fileName = Paths.get(basePath.toString(), keyFileName).toFile();
+    String keyData = FileUtils.readFileToString(fileName, DEFAULT_CHARSET);
+    final byte[] pemContent;
+    try (PemReader pemReader = new PemReader(new StringReader(keyData))) {
+      PemObject keyObject = pemReader.readPemObject();
+      pemContent = keyObject.getContent();
+    }
+    return new PKCS8EncodedKeySpec(pemContent);
+  }
+
+  /**
+   * Returns a Private Key from a PEM encoded file.
+   *
+   * @param basePath - base path
+   * @param privateKeyFileName - private key file name.
+   * @return PrivateKey
+   * @throws InvalidKeySpecException  - on Error.
+   * @throws NoSuchAlgorithmException - on Error.
+   * @throws IOException              - on Error.
+   */
+  public PrivateKey readPrivateKey(Path basePath, String privateKeyFileName)
+      throws InvalidKeySpecException, NoSuchAlgorithmException, IOException {
+    PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, privateKeyFileName);
+    final KeyFactory keyFactory =
+        KeyFactory.getInstance(securityConfig.getProvider());
+    final PrivateKey privateKey =
+        keyFactory.generatePrivate(encodedKeySpec);
+    return privateKey;
+  }
+
+  /**
+   * Returns a public key from a PEM encoded file.
+   *
+   * @param basePath - base path.
+   * @param publicKeyFileName - public key file name.
+   * @return PublicKey
+   * @throws NoSuchAlgorithmException - on Error.
+   * @throws InvalidKeySpecException  - on Error.
+   * @throws IOException              - on Error.
+   */
+  public PublicKey readPublicKey(Path basePath, String publicKeyFileName)
+      throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
+    PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, publicKeyFileName);
+    final KeyFactory keyFactory =
+        KeyFactory.getInstance(securityConfig.getProvider());
+    final PublicKey publicKey =
+        keyFactory.generatePublic(encodedKeySpec);
+    return publicKey;
+  }
+
+  /**
+   * Helper function that actually writes data to the files.
+   *
+   * @param basePath - base path to write key
+   * @param keyPair - Key pair to write to file.
+   * @param privateKeyFileName - private key file name.
+   * @param publicKeyFileName - public key file name.
+   * @param force - forces overwriting the keys.
+   * @throws IOException - On I/O failure.
+   */
+  private synchronized void writeKey(Path basePath, KeyPair keyPair,
+      String privateKeyFileName, String publicKeyFileName, boolean force)
+      throws IOException {
+    checkPreconditions(basePath);
+
+    File privateKeyFile =
+        Paths.get(location.toString(), privateKeyFileName).toFile();
+    File publicKeyFile =
+        Paths.get(location.toString(), publicKeyFileName).toFile();
+    checkKeyFile(privateKeyFile, force, publicKeyFile);
+
+    try (PemWriter privateKeyWriter = new PemWriter(new
+        FileWriterWithEncoding(privateKeyFile, DEFAULT_CHARSET))) {
+      privateKeyWriter.writeObject(
+          new PemObject(PRIVATE_KEY, keyPair.getPrivate().getEncoded()));
+    }
+
+    try (PemWriter publicKeyWriter = new PemWriter(new
+        FileWriterWithEncoding(publicKeyFile, DEFAULT_CHARSET))) {
+      publicKeyWriter.writeObject(
+          new PemObject(PUBLIC_KEY, keyPair.getPublic().getEncoded()));
+    }
+    Files.setPosixFilePermissions(privateKeyFile.toPath(), permissionSet);
+    Files.setPosixFilePermissions(publicKeyFile.toPath(), permissionSet);
+  }
+
+  /**
+   * Checks if private and public key file already exists. Throws IOException if
+   * file exists and force flag is set to false, else will delete the existing
+   * file.
+   *
+   * @param privateKeyFile - Private key file.
+   * @param force - forces overwriting the keys.
+   * @param publicKeyFile - public key file.
+   * @throws IOException - On I/O failure.
+   */
+  private void checkKeyFile(File privateKeyFile, boolean force,
+      File publicKeyFile) throws IOException {
+    if (privateKeyFile.exists() && force) {
+      if (!privateKeyFile.delete()) {
+        throw new IOException("Unable to delete private key file.");
+      }
+    }
+
+    if (publicKeyFile.exists() && force) {
+      if (!publicKeyFile.delete()) {
+        throw new IOException("Unable to delete public key file.");
+      }
+    }
+
+    if (privateKeyFile.exists()) {
+      throw new IOException("Private Key file already exists.");
+    }
+
+    if (publicKeyFile.exists()) {
+      throw new IOException("Public Key file already exists.");
+    }
+  }
+
+  /**
+   * Checks if base path exists and sets file permissions.
+   *
+   * @param basePath - base path to write key
+   * @throws IOException - On I/O failure.
+   */
+  private void checkPreconditions(Path basePath) throws IOException {
+    Preconditions.checkNotNull(basePath, "Base path cannot be null");
+    if (!isPosixFileSystem.get()) {
+      LOG.error("Keys cannot be stored securely without POSIX file system "
+          + "support for now.");
+      throw new IOException("Unsupported File System for pem file.");
+    }
+
+    if (Files.exists(basePath)) {
+      // Not the end of the world if we reset the permissions on an existing
+      // directory.
+      Files.setPosixFilePermissions(basePath, permissionSet);
+    } else {
+      boolean success = basePath.toFile().mkdirs();
+      if (!success) {
+        LOG.error("Unable to create the directory for the "
+            + "location. Location: {}", basePath);
+        throw new IOException("Unable to create the directory for the "
+            + "location. Location:" + basePath);
+      }
+      Files.setPosixFilePermissions(basePath, permissionSet);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
index 1cf39b2..ad26f77 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
@@ -62,7 +62,7 @@ public class StorageInfo {
    * @param cT
    *          Cluster creation Time
 
-   * @throws IOException
+   * @throws IOException - on Error.
    */
   public StorageInfo(NodeType type, String cid, long cT)
       throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
new file mode 100644
index 0000000..0e98ba7
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+import java.security.cert.CertificateException;
+import java.util.function.Consumer;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
+import static org.junit.Assert.*;
+
+/**
+ * Tests the Default CA Server.
+ */
+public class TestDefaultCAServer {
+  private static OzoneConfiguration conf = new OzoneConfiguration();
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+  @Before
+  public void init() throws IOException {
+    conf.set(OZONE_METADATA_DIRS, temporaryFolder.newFolder().toString());
+  }
+
+  @Test
+  public void testInit() throws SCMSecurityException, CertificateException,
+      IOException {
+    SecurityConfig securityConfig = new SecurityConfig(conf);
+    CertificateServer testCA = new DefaultCAServer("testCA",
+        RandomStringUtils.randomAlphabetic(4),
+        RandomStringUtils.randomAlphabetic(4));
+    testCA.init(securityConfig, CertificateServer.CAType.SELF_SIGNED_CA);
+    X509CertificateHolder first = testCA.getCACertificate();
+    assertNotNull(first);
+    //Init is idempotent.
+    testCA.init(securityConfig, CertificateServer.CAType.SELF_SIGNED_CA);
+    X509CertificateHolder second = testCA.getCACertificate();
+    assertEquals(first, second);
+
+    // we only support Self Signed CA for now.
+    try {
+      testCA.init(securityConfig, CertificateServer.CAType.INTERMEDIARY_CA);
+      fail("code should not reach here, exception should have been thrown.");
+    } catch (IllegalStateException e) {
+      // This is a run time exception, hence it is not caught by the junit
+      // expected Exception.
+      assertTrue(e.toString().contains("Not implemented"));
+    }
+  }
+
+  @Test
+  public void testMissingCertificate() {
+    SecurityConfig securityConfig = new SecurityConfig(conf);
+    CertificateServer testCA = new DefaultCAServer("testCA",
+        RandomStringUtils.randomAlphabetic(4),
+        RandomStringUtils.randomAlphabetic(4));
+    Consumer<SecurityConfig> caInitializer =
+        ((DefaultCAServer) testCA).processVerificationStatus(
+        DefaultCAServer.VerificationStatus.MISSING_CERTIFICATE);
+    try {
+
+      caInitializer.accept(securityConfig);
+      fail("code should not reach here, exception should have been thrown.");
+    } catch (IllegalStateException e) {
+      // This also is a runtime exception. Hence not caught by junit expected
+      // exception.
+      assertTrue(e.toString().contains("Missing Root Certs"));
+    }
+  }
+
+  @Test
+  public void testMissingKey() {
+    SecurityConfig securityConfig = new SecurityConfig(conf);
+    CertificateServer testCA = new DefaultCAServer("testCA",
+        RandomStringUtils.randomAlphabetic(4),
+        RandomStringUtils.randomAlphabetic(4));
+    Consumer<SecurityConfig> caInitializer =
+        ((DefaultCAServer) testCA).processVerificationStatus(
+            DefaultCAServer.VerificationStatus.MISSING_KEYS);
+    try {
+
+      caInitializer.accept(securityConfig);
+      fail("code should not reach here, exception should have been thrown.");
+    } catch (IllegalStateException e) {
+      // This also is a runtime exception. Hence not caught by junit expected
+      // exception.
+      assertTrue(e.toString().contains("Missing Keys"));
+    }
+
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/package-info.java
new file mode 100644
index 0000000..1d20a78
--- /dev/null
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/**
+ * Tests for Default CA.
+ */
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCertificateCodec.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCertificateCodec.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCertificateCodec.java
new file mode 100644
index 0000000..9ac956f
--- /dev/null
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCertificateCodec.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import 
org.apache.hadoop.hdds.security.x509.certificates.utils.SelfSignedCertificate;
+import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+import java.time.LocalDate;
+import java.time.temporal.ChronoUnit;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests the Certificate codecs.
+ */
+public class TestCertificateCodec {
+  private static OzoneConfiguration conf = new OzoneConfiguration();
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+  @Before
+  public void init() throws IOException {
+    conf.set(OZONE_METADATA_DIRS, temporaryFolder.newFolder().toString());
+  }
+
+  /**
+   * This test converts an X509Certificate Holder object to a PEM encoded 
String,
+   * then creates a new X509Certificate object to verify that we are able to
+   * serialize and deserialize correctly. We follow up with converting these
+   * objects to standard JCA x509Certificate objects.
+   *
+   * @throws NoSuchProviderException  - on Error.
+   * @throws NoSuchAlgorithmException - on Error.
+   * @throws IOException              - on Error.
+   * @throws SCMSecurityException     - on Error.
+   * @throws CertificateException     - on Error.
+   */
+  @Test
+  public void testGetPEMEncodedString()
+      throws NoSuchProviderException, NoSuchAlgorithmException,
+      IOException, SCMSecurityException, CertificateException {
+    HDDSKeyGenerator keyGenerator =
+        new HDDSKeyGenerator(conf);
+    X509CertificateHolder cert =
+        SelfSignedCertificate.newBuilder()
+            .setSubject(RandomStringUtils.randomAlphabetic(4))
+            .setClusterID(RandomStringUtils.randomAlphabetic(4))
+            .setScmID(RandomStringUtils.randomAlphabetic(4))
+            .setBeginDate(LocalDate.now())
+            .setEndDate(LocalDate.now().plus(1, ChronoUnit.DAYS))
+            .setConfiguration(keyGenerator.getSecurityConfig()
+                .getConfiguration())
+            .setKey(keyGenerator.generateKey())
+            .makeCA()
+            .build();
+    CertificateCodec codec = new CertificateCodec(conf);
+    String pemString = codec.getPEMEncodedString(cert);
+    assertTrue(pemString.startsWith(CertificateCodec.BEGIN_CERT));
+    assertTrue(pemString.endsWith(CertificateCodec.END_CERT + "\n"));
+
+    // Read back the certificate and verify that all the comparisons pass.
+    X509CertificateHolder newCert =
+        codec.getCertificateHolder(codec.getX509Certificate(pemString));
+    assertEquals(cert, newCert);
+
+    // Just make sure we can decode both these classes to Java Std. Lib 
classes.
+    X509Certificate firstCert = CertificateCodec.getX509Certificate(cert);
+    X509Certificate secondCert = CertificateCodec.getX509Certificate(newCert);
+    assertEquals(firstCert, secondCert);
+  }
+
+  /**
+   * Tests writing and reading certificates in PEM encoded form.
+   *
+   * @throws NoSuchProviderException  - on Error.
+   * @throws NoSuchAlgorithmException - on Error.
+   * @throws IOException              - on Error.
+   * @throws SCMSecurityException     - on Error.
+   * @throws CertificateException     - on Error.
+   */
+  @Test
+  public void testwriteCertificate() throws NoSuchProviderException,
+      NoSuchAlgorithmException, IOException, SCMSecurityException,
+      CertificateException {
+    HDDSKeyGenerator keyGenerator =
+        new HDDSKeyGenerator(conf);
+    X509CertificateHolder cert =
+        SelfSignedCertificate.newBuilder()
+            .setSubject(RandomStringUtils.randomAlphabetic(4))
+            .setClusterID(RandomStringUtils.randomAlphabetic(4))
+            .setScmID(RandomStringUtils.randomAlphabetic(4))
+            .setBeginDate(LocalDate.now())
+            .setEndDate(LocalDate.now().plus(1, ChronoUnit.DAYS))
+            .setConfiguration(keyGenerator.getSecurityConfig()
+                .getConfiguration())
+            .setKey(keyGenerator.generateKey())
+            .makeCA()
+            .build();
+    CertificateCodec codec = new CertificateCodec(conf);
+    String pemString = codec.getPEMEncodedString(cert);
+    File basePath = temporaryFolder.newFolder();
+    if (!basePath.exists()) {
+      Assert.assertTrue(basePath.mkdirs());
+    }
+    codec.writeCertificate(basePath.toPath(), "pemcertificate.crt",
+        pemString, false);
+    X509CertificateHolder certHolder =
+        codec.readCertificate(basePath.toPath(), "pemcertificate.crt");
+    assertNotNull(certHolder);
+    assertEquals(cert.getSerialNumber(), certHolder.getSerialNumber());
+  }
+
+  /**
+   * Tests reading and writing certificates in DER form.
+   *
+   * @throws IOException              - on Error.
+   * @throws SCMSecurityException     - on Error.
+   * @throws CertificateException     - on Error.
+   * @throws NoSuchProviderException  - on Error.
+   * @throws NoSuchAlgorithmException - on Error.
+   */
+  @Test
+  public void testwriteCertificateDefault()
+      throws IOException, SCMSecurityException, CertificateException,
+      NoSuchProviderException, NoSuchAlgorithmException {
+    HDDSKeyGenerator keyGenerator =
+        new HDDSKeyGenerator(conf);
+    X509CertificateHolder cert =
+        SelfSignedCertificate.newBuilder()
+            .setSubject(RandomStringUtils.randomAlphabetic(4))
+            .setClusterID(RandomStringUtils.randomAlphabetic(4))
+            .setScmID(RandomStringUtils.randomAlphabetic(4))
+            .setBeginDate(LocalDate.now())
+            .setEndDate(LocalDate.now().plus(1, ChronoUnit.DAYS))
+            .setConfiguration(keyGenerator.getSecurityConfig()
+                .getConfiguration())
+            .setKey(keyGenerator.generateKey())
+            .makeCA()
+            .build();
+    CertificateCodec codec = new CertificateCodec(conf);
+    codec.writeCertificate(cert);
+    X509CertificateHolder certHolder = codec.readCertificate();
+    assertNotNull(certHolder);
+    assertEquals(cert.getSerialNumber(), certHolder.getSerialNumber());
+  }
+
+  /**
+   * Tests writing to non-default certificate file name.
+   *
+   * @throws IOException              - on Error.
+   * @throws SCMSecurityException     - on Error.
+   * @throws NoSuchProviderException  - on Error.
+   * @throws NoSuchAlgorithmException - on Error.
+   * @throws CertificateException     - on Error.
+   */
+  @Test
+  public void writeCertificate2() throws IOException, SCMSecurityException,
+      NoSuchProviderException, NoSuchAlgorithmException, CertificateException {
+    HDDSKeyGenerator keyGenerator =
+        new HDDSKeyGenerator(conf);
+    X509CertificateHolder cert =
+        SelfSignedCertificate.newBuilder()
+            .setSubject(RandomStringUtils.randomAlphabetic(4))
+            .setClusterID(RandomStringUtils.randomAlphabetic(4))
+            .setScmID(RandomStringUtils.randomAlphabetic(4))
+            .setBeginDate(LocalDate.now())
+            .setEndDate(LocalDate.now().plus(1, ChronoUnit.DAYS))
+            .setConfiguration(keyGenerator.getSecurityConfig()
+                .getConfiguration())
+            .setKey(keyGenerator.generateKey())
+            .makeCA()
+            .build();
+    CertificateCodec codec =
+        new CertificateCodec(keyGenerator.getSecurityConfig(), "ca");
+    codec.writeCertificate(cert, "newcert.crt", false);
+    // Rewrite with force support
+    codec.writeCertificate(cert, "newcert.crt", true);
+    X509CertificateHolder x509CertificateHolder =
+        codec.readCertificate(codec.getLocation(), "newcert.crt");
+    assertNotNull(x509CertificateHolder);
+
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
index 9328c50..d234b66 100644
--- 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -6,20 +6,22 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ *
  */
-package org.apache.hadoop.hdds.security.x509.certificates;
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import 
org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
 import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
 import org.apache.hadoop.hdds.security.x509.keys.SecurityUtil;
 import org.bouncycastle.asn1.ASN1Sequence;
@@ -45,12 +47,15 @@ import java.util.UUID;
 
 import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
 
+/**
+ * Certificate Signing Request.
+ */
 public class TestCertificateSignRequest {
 
-  private SecurityConfig securityConfig;
   private static OzoneConfiguration conf = new OzoneConfiguration();
   @Rule
   public TemporaryFolder temporaryFolder = new TemporaryFolder();
+  private SecurityConfig securityConfig;
 
   @Before
   public void init() throws IOException {
@@ -257,7 +262,7 @@ public class TestCertificateSignRequest {
     // Verify CSR with attribute for extensions
     Assert.assertEquals(1, csr.getAttributes().length);
   }
-  
+
   @Test
   public void testCsrSerialization() throws NoSuchProviderException,
       NoSuchAlgorithmException, SCMSecurityException, IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
index e581dc8..416cc61 100644
--- 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
@@ -5,7 +5,7 @@
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
+ * with the License.  You may obtain a copy of the License at
  *
  *      http://www.apache.org/licenses/LICENSE-2.0
  *
@@ -17,11 +17,12 @@
  *
  */
 
-package org.apache.hadoop.hdds.security.x509.certificates;
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import 
org.apache.hadoop.hdds.security.x509.certificates.utils.SelfSignedCertificate;
 import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
 import org.bouncycastle.asn1.x509.Extension;
 import org.bouncycastle.cert.X509CertificateHolder;
@@ -41,8 +42,8 @@ import java.security.NoSuchProviderException;
 import java.security.SignatureException;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import java.time.Duration;
-import java.time.Instant;
+import java.time.LocalDate;
+import java.time.temporal.ChronoUnit;
 import java.util.Date;
 import java.util.UUID;
 
@@ -52,10 +53,10 @@ import static 
org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
  * Test Class for Root Certificate generation.
  */
 public class TestRootCertificate {
-  private SecurityConfig securityConfig;
   private static OzoneConfiguration conf = new OzoneConfiguration();
   @Rule
   public TemporaryFolder temporaryFolder = new TemporaryFolder();
+  private SecurityConfig securityConfig;
 
   @Before
   public void init() throws IOException {
@@ -67,10 +68,9 @@ public class TestRootCertificate {
   public void testAllFieldsAreExpected()
       throws SCMSecurityException, NoSuchProviderException,
       NoSuchAlgorithmException, CertificateException,
-      SignatureException, InvalidKeyException {
-    Instant now = Instant.now();
-    Date notBefore = Date.from(now);
-    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
+      SignatureException, InvalidKeyException, IOException {
+    LocalDate notBefore = LocalDate.now();
+    LocalDate notAfter = notBefore.plus(365, ChronoUnit.DAYS);
     String clusterID = UUID.randomUUID().toString();
     String scmID = UUID.randomUUID().toString();
     String subject = "testRootCert";
@@ -96,13 +96,15 @@ public class TestRootCertificate {
 
 
     // Make sure that NotBefore is before the current Date
-    Date invalidDate = Date.from(now.minus(Duration.ofDays(1)));
+    Date invalidDate = java.sql.Date.valueOf(
+        notBefore.minus(1, ChronoUnit.DAYS));
     Assert.assertFalse(
         certificateHolder.getNotBefore()
             .before(invalidDate));
 
     //Make sure the end date is honored.
-    invalidDate = Date.from(now.plus(Duration.ofDays(366)));
+    invalidDate = java.sql.Date.valueOf(
+        notAfter.plus(1, ChronoUnit.DAYS));
     Assert.assertFalse(
         certificateHolder.getNotAfter()
             .after(invalidDate));
@@ -113,7 +115,8 @@ public class TestRootCertificate {
     Assert.assertEquals(certificateHolder.getIssuer().toString(), dnName);
     Assert.assertEquals(certificateHolder.getSubject().toString(), dnName);
 
-    // We did not ask for this Certificate to be a CA certificate, hence that
+    // We did not ask for this Certificate to be a CertificateServer
+    // certificate, hence that
     // extension should be null.
     Assert.assertNull(
         certificateHolder.getExtension(Extension.basicConstraints));
@@ -128,10 +131,9 @@ public class TestRootCertificate {
   @Test
   public void testCACert()
       throws SCMSecurityException, NoSuchProviderException,
-      NoSuchAlgorithmException {
-    Instant now = Instant.now();
-    Date notBefore = Date.from(now);
-    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
+      NoSuchAlgorithmException, IOException {
+    LocalDate notBefore = LocalDate.now();
+    LocalDate notAfter = notBefore.plus(365, ChronoUnit.DAYS);
     String clusterID = UUID.randomUUID().toString();
     String scmID = UUID.randomUUID().toString();
     String subject = "testRootCert";
@@ -151,7 +153,8 @@ public class TestRootCertificate {
             .makeCA();
 
     X509CertificateHolder certificateHolder = builder.build();
-    // This time we asked for a CA Certificate, make sure that extension is
+    // This time we asked for a CertificateServer Certificate, make sure that
+    // extension is
     // present and valid.
     Extension basicExt =
         certificateHolder.getExtension(Extension.basicConstraints);
@@ -167,10 +170,9 @@ public class TestRootCertificate {
   @Test
   public void testInvalidParamFails()
       throws SCMSecurityException, NoSuchProviderException,
-      NoSuchAlgorithmException {
-    Instant now = Instant.now();
-    Date notBefore = Date.from(now);
-    Date notAfter = Date.from(now.plus(Duration.ofDays(365)));
+      NoSuchAlgorithmException, IOException {
+    LocalDate notBefore = LocalDate.now();
+    LocalDate notAfter = notBefore.plus(365, ChronoUnit.DAYS);
     String clusterID = UUID.randomUUID().toString();
     String scmID = UUID.randomUUID().toString();
     String subject = "testRootCert";
@@ -253,6 +255,4 @@ public class TestRootCertificate {
     // Assert that we can create a certificate with all sane params.
     Assert.assertNotNull(builder.build());
   }
-
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/package-info.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/package-info.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/package-info.java
index c8a31fc..fffe1e5 100644
--- 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/package-info.java
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/package-info.java
@@ -5,7 +5,7 @@
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
+ * with the License.  You may obtain a copy of the License at
  *
  *      http://www.apache.org/licenses/LICENSE-2.0
  *
@@ -19,4 +19,4 @@
 /**
  * Test classes for Certificate utilities.
  */
-package org.apache.hadoop.hdds.security.x509.certificates;
\ No newline at end of file
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyPEMWriter.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyPEMWriter.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyPEMWriter.java
deleted file mode 100644
index db5d430..0000000
--- 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestHDDSKeyPEMWriter.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.hadoop.hdds.security.x509.keys;
-
-import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.attribute.PosixFilePermission;
-import java.security.KeyFactory;
-import java.security.KeyPair;
-import java.security.NoSuchAlgorithmException;
-import java.security.NoSuchProviderException;
-import java.security.PrivateKey;
-import java.security.PublicKey;
-import java.security.spec.InvalidKeySpecException;
-import java.security.spec.PKCS8EncodedKeySpec;
-import java.security.spec.X509EncodedKeySpec;
-import java.util.Set;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.hdds.security.x509.SecurityConfig;
-import org.apache.hadoop.test.LambdaTestUtils;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-/**
- * Test class for HDDS pem writer.
- */
-public class TestHDDSKeyPEMWriter {
-
-  @Rule
-  public TemporaryFolder temporaryFolder = new TemporaryFolder();
-  private OzoneConfiguration configuration;
-  private HDDSKeyGenerator keyGenerator;
-  private String prefix;
-
-  @Before
-  public void init() throws IOException {
-    configuration = new OzoneConfiguration();
-    prefix = temporaryFolder.newFolder().toString();
-    configuration.set(HDDS_METADATA_DIR_NAME, prefix);
-    keyGenerator = new HDDSKeyGenerator(configuration);
-  }
-
-  /**
-   * Assert basic things like we are able to create a file, and the names are
-   * in expected format etc.
-   *
-   * @throws NoSuchProviderException - On Error, due to missing Java
-   * dependencies.
-   * @throws NoSuchAlgorithmException - On Error,  due to missing Java
-   * dependencies.
-   * @throws IOException - On I/O failure.
-   */
-  @Test
-  public void testWriteKey()
-      throws NoSuchProviderException, NoSuchAlgorithmException,
-      IOException, InvalidKeySpecException {
-    KeyPair keys = keyGenerator.generateKey();
-    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
-    pemWriter.writeKey(keys);
-
-    // Assert that locations have been created.
-    Path keyLocation = pemWriter.getSecurityConfig().getKeyLocation();
-    Assert.assertTrue(keyLocation.toFile().exists());
-
-    // Assert that locations are created in the locations that we specified
-    // using the Config.
-    Assert.assertTrue(keyLocation.toString().startsWith(prefix));
-    Path privateKeyPath = Paths.get(keyLocation.toString(),
-        pemWriter.getSecurityConfig().getPrivateKeyFileName());
-    Assert.assertTrue(privateKeyPath.toFile().exists());
-    Path publicKeyPath = Paths.get(keyLocation.toString(),
-        pemWriter.getSecurityConfig().getPublicKeyFileName());
-    Assert.assertTrue(publicKeyPath.toFile().exists());
-
-    // Read the private key and test if the expected String in the PEM file
-    // format exists.
-    byte[] privateKey = Files.readAllBytes(privateKeyPath);
-    String privateKeydata = new String(privateKey, StandardCharsets.UTF_8);
-    Assert.assertTrue(privateKeydata.contains("PRIVATE KEY"));
-
-    // Read the public key and test if the expected String in the PEM file
-    // format exists.
-    byte[] publicKey = Files.readAllBytes(publicKeyPath);
-    String publicKeydata = new String(publicKey, StandardCharsets.UTF_8);
-    Assert.assertTrue(publicKeydata.contains("PUBLIC KEY"));
-
-    // Let us decode the PEM file and parse it back into binary.
-    KeyFactory kf = KeyFactory.getInstance(
-        pemWriter.getSecurityConfig().getKeyAlgo());
-
-    // Replace the PEM Human readable guards.
-    privateKeydata =
-        privateKeydata.replace("-----BEGIN PRIVATE KEY-----\n", "");
-    privateKeydata =
-        privateKeydata.replace("-----END PRIVATE KEY-----", "");
-
-    // Decode the bas64 to binary format and then use an ASN.1 parser to
-    // parse the binary format.
-
-    byte[] keyBytes = Base64.decodeBase64(privateKeydata);
-    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
-    PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
-    Assert.assertNotNull("Private Key should not be null",
-        privateKeyDecoded);
-
-    // Let us decode the public key and veriy that we can parse it back into
-    // binary.
-    publicKeydata =
-        publicKeydata.replace("-----BEGIN PUBLIC KEY-----\n", "");
-    publicKeydata =
-        publicKeydata.replace("-----END PUBLIC KEY-----", "");
-
-    keyBytes = Base64.decodeBase64(publicKeydata);
-    X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
-    PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
-    Assert.assertNotNull("Public Key should not be null",
-        publicKeyDecoded);
-
-    // Now let us assert the permissions on the Directories and files are as
-    // expected.
-    Set<PosixFilePermission> expectedSet = pemWriter.getPermissionSet();
-    Set<PosixFilePermission> currentSet =
-        Files.getPosixFilePermissions(privateKeyPath);
-    currentSet.removeAll(expectedSet);
-    Assert.assertEquals(0, currentSet.size());
-
-    currentSet =
-        Files.getPosixFilePermissions(publicKeyPath);
-    currentSet.removeAll(expectedSet);
-    Assert.assertEquals(0, currentSet.size());
-
-    currentSet =
-        Files.getPosixFilePermissions(keyLocation);
-    currentSet.removeAll(expectedSet);
-    Assert.assertEquals(0, currentSet.size());
-  }
-
-  /**
-   * Assert key rewrite fails without force option.
-   *
-   * @throws IOException - on I/O failure.
-   */
-  @Test
-  public void testReWriteKey()
-      throws Exception {
-    KeyPair kp = keyGenerator.generateKey();
-    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
-    SecurityConfig secConfig = pemWriter.getSecurityConfig();
-    pemWriter.writeKey(kp);
-
-    // Assert that rewriting of keys throws exception with valid messages.
-    LambdaTestUtils
-        .intercept(IOException.class, "Private Key file already exists.",
-            () -> pemWriter.writeKey(kp));
-    FileUtils.deleteQuietly(Paths.get(
-        secConfig.getKeyLocation().toString() + "/" + secConfig
-            .getPrivateKeyFileName()).toFile());
-    LambdaTestUtils
-        .intercept(IOException.class, "Public Key file already exists.",
-            () -> pemWriter.writeKey(kp));
-    FileUtils.deleteQuietly(Paths.get(
-        secConfig.getKeyLocation().toString() + "/" + secConfig
-            .getPublicKeyFileName()).toFile());
-
-    // Should succeed now as both public and private key are deleted.
-    pemWriter.writeKey(kp);
-    // Should succeed with overwrite flag as true.
-    pemWriter.writeKey(kp, true);
-
-  }
-
-  /**
-   * Assert key rewrite fails in non Posix file system.
-   *
-   * @throws IOException - on I/O failure.
-   */
-  @Test
-  public void testWriteKeyInNonPosixFS()
-      throws Exception {
-    KeyPair kp = keyGenerator.generateKey();
-    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(configuration);
-    pemWriter.setIsPosixFileSystem(() -> false);
-
-    // Assert key rewrite fails in non Posix file system.
-    LambdaTestUtils
-        .intercept(IOException.class, "Unsupported File System for pem file.",
-            () -> pemWriter.writeKey(kp));
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
new file mode 100644
index 0000000..f0973f7
--- /dev/null
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.keys;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.security.KeyFactory;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.Set;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.test.LambdaTestUtils;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Test class for HDDS pem writer.
+ */
+public class TestKeyCodec {
+
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+  private OzoneConfiguration configuration;
+  private HDDSKeyGenerator keyGenerator;
+  private String prefix;
+
+  @Before
+  public void init() throws IOException {
+    configuration = new OzoneConfiguration();
+    prefix = temporaryFolder.newFolder().toString();
+    configuration.set(HDDS_METADATA_DIR_NAME, prefix);
+    keyGenerator = new HDDSKeyGenerator(configuration);
+  }
+
+  /**
+   * Assert basic things like we are able to create a file, and the names are
+   * in expected format etc.
+   *
+   * @throws NoSuchProviderException - On Error, due to missing Java
+   * dependencies.
+   * @throws NoSuchAlgorithmException - On Error, due to missing Java
+   * dependencies.
+   * @throws IOException - On I/O failure.
+   */
+  @Test
+  public void testWriteKey()
+      throws NoSuchProviderException, NoSuchAlgorithmException,
+      IOException, InvalidKeySpecException {
+    KeyPair keys = keyGenerator.generateKey();
+    KeyCodec pemWriter = new KeyCodec(configuration);
+    pemWriter.writeKey(keys);
+
+    // Assert that locations have been created.
+    Path keyLocation = pemWriter.getSecurityConfig().getKeyLocation();
+    Assert.assertTrue(keyLocation.toFile().exists());
+
+    // Assert that locations are created in the locations that we specified
+    // using the Config.
+    Assert.assertTrue(keyLocation.toString().startsWith(prefix));
+    Path privateKeyPath = Paths.get(keyLocation.toString(),
+        pemWriter.getSecurityConfig().getPrivateKeyFileName());
+    Assert.assertTrue(privateKeyPath.toFile().exists());
+    Path publicKeyPath = Paths.get(keyLocation.toString(),
+        pemWriter.getSecurityConfig().getPublicKeyFileName());
+    Assert.assertTrue(publicKeyPath.toFile().exists());
+
+    // Read the private key and test if the expected String in the PEM file
+    // format exists.
+    byte[] privateKey = Files.readAllBytes(privateKeyPath);
+    String privateKeydata = new String(privateKey, StandardCharsets.UTF_8);
+    Assert.assertTrue(privateKeydata.contains("PRIVATE KEY"));
+
+    // Read the public key and test if the expected String in the PEM file
+    // format exists.
+    byte[] publicKey = Files.readAllBytes(publicKeyPath);
+    String publicKeydata = new String(publicKey, StandardCharsets.UTF_8);
+    Assert.assertTrue(publicKeydata.contains("PUBLIC KEY"));
+
+    // Let us decode the PEM file and parse it back into binary.
+    KeyFactory kf = KeyFactory.getInstance(
+        pemWriter.getSecurityConfig().getKeyAlgo());
+
+    // Replace the PEM Human readable guards.
+    privateKeydata =
+        privateKeydata.replace("-----BEGIN PRIVATE KEY-----\n", "");
+    privateKeydata =
+        privateKeydata.replace("-----END PRIVATE KEY-----", "");
+
+    // Decode the base64 to binary format and then use an ASN.1 parser to
+    // parse the binary format.
+
+    byte[] keyBytes = Base64.decodeBase64(privateKeydata);
+    PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
+    PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
+    Assert.assertNotNull("Private Key should not be null",
+        privateKeyDecoded);
+
+    // Let us decode the public key and verify that we can parse it back into
+    // binary.
+    publicKeydata =
+        publicKeydata.replace("-----BEGIN PUBLIC KEY-----\n", "");
+    publicKeydata =
+        publicKeydata.replace("-----END PUBLIC KEY-----", "");
+
+    keyBytes = Base64.decodeBase64(publicKeydata);
+    X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
+    PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
+    Assert.assertNotNull("Public Key should not be null",
+        publicKeyDecoded);
+
+    // Now let us assert the permissions on the Directories and files are as
+    // expected.
+    Set<PosixFilePermission> expectedSet = pemWriter.getPermissionSet();
+    Set<PosixFilePermission> currentSet =
+        Files.getPosixFilePermissions(privateKeyPath);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+
+    currentSet =
+        Files.getPosixFilePermissions(publicKeyPath);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+
+    currentSet =
+        Files.getPosixFilePermissions(keyLocation);
+    currentSet.removeAll(expectedSet);
+    Assert.assertEquals(0, currentSet.size());
+  }
+
+  /**
+   * Assert key rewrite fails without force option.
+   *
+   * @throws IOException - on I/O failure.
+   */
+  @Test
+  public void testReWriteKey()
+      throws Exception {
+    KeyPair kp = keyGenerator.generateKey();
+    KeyCodec pemWriter = new KeyCodec(configuration);
+    SecurityConfig secConfig = pemWriter.getSecurityConfig();
+    pemWriter.writeKey(kp);
+
+    // Assert that rewriting of keys throws exception with valid messages.
+    LambdaTestUtils
+        .intercept(IOException.class, "Private Key file already exists.",
+            () -> pemWriter.writeKey(kp));
+    FileUtils.deleteQuietly(Paths.get(
+        secConfig.getKeyLocation().toString() + "/" + secConfig
+            .getPrivateKeyFileName()).toFile());
+    LambdaTestUtils
+        .intercept(IOException.class, "Public Key file already exists.",
+            () -> pemWriter.writeKey(kp));
+    FileUtils.deleteQuietly(Paths.get(
+        secConfig.getKeyLocation().toString() + "/" + secConfig
+            .getPublicKeyFileName()).toFile());
+
+    // Should succeed now as both public and private key are deleted.
+    pemWriter.writeKey(kp);
+    // Should succeed with overwrite flag as true.
+    pemWriter.writeKey(kp, true);
+
+  }
+
+  /**
+   * Assert key rewrite fails in non Posix file system.
+   *
+   * @throws IOException - on I/O failure.
+   */
+  @Test
+  public void testWriteKeyInNonPosixFS()
+      throws Exception {
+    KeyPair kp = keyGenerator.generateKey();
+    KeyCodec pemWriter = new KeyCodec(configuration);
+    pemWriter.setIsPosixFileSystem(() -> false);
+
+    // Assert key rewrite fails in non Posix file system.
+    LambdaTestUtils
+        .intercept(IOException.class, "Unsupported File System for pem file.",
+            () -> pemWriter.writeKey(kp));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java
index 47ad597..d5695c5 100644
--- 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java
@@ -31,7 +31,6 @@ import org.junit.rules.TemporaryFolder;
 import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
 
 /**
  * Tests RDBStore creation.
@@ -131,7 +130,7 @@ public class TestDBStoreBuilder {
             RandomStringUtils.random(9).getBytes(StandardCharsets.UTF_8);
         firstTable.put(key, value);
         byte[] temp = firstTable.get(key);
-        Assert.assertTrue(Arrays.equals(value, temp));
+        Assert.assertArrayEquals(value, temp);
       }
 
       try (Table secondTable = dbStore.getTable("Second")) {
@@ -161,7 +160,7 @@ public class TestDBStoreBuilder {
             RandomStringUtils.random(9).getBytes(StandardCharsets.UTF_8);
         firstTable.put(key, value);
         byte[] temp = firstTable.get(key);
-        Assert.assertTrue(Arrays.equals(value, temp));
+        Assert.assertArrayEquals(value, temp);
       }
 
       try (Table secondTable = dbStore.getTable("Second")) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/87f51d23/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
----------------------------------------------------------------------
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
index c1695f1..7fb995e 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hdds.scm.server.SCMStorage;
 import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
 import 
org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
 import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
-import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyPEMWriter;
+import org.apache.hadoop.hdds.security.x509.keys.KeyCodec;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ipc.RPC;
@@ -86,7 +86,7 @@ public final class TestSecureOzoneCluster {
 
   private static final String TEST_USER = "testUgiUser";
   private static final int CLIENT_TIMEOUT = 2 * 1000;
-  private Logger LOGGER = LoggerFactory
+  private Logger logger = LoggerFactory
       .getLogger(TestSecureOzoneCluster.class);
 
   @Rule
@@ -125,9 +125,9 @@ public final class TestSecureOzoneCluster {
       createCredentialsInKDC(conf, miniKdc);
       generateKeyPair(conf);
     } catch (IOException e) {
-      LOGGER.error("Failed to initialize TestSecureOzoneCluster", e);
+      logger.error("Failed to initialize TestSecureOzoneCluster", e);
     } catch (Exception e) {
-      LOGGER.error("Failed to initialize TestSecureOzoneCluster", e);
+      logger.error("Failed to initialize TestSecureOzoneCluster", e);
     }
   }
 
@@ -146,7 +146,7 @@ public final class TestSecureOzoneCluster {
       }
       FileUtils.deleteQuietly(metaDirPath.toFile());
     } catch (Exception e) {
-      LOGGER.error("Failed to stop TestSecureOzoneCluster", e);
+      logger.error("Failed to stop TestSecureOzoneCluster", e);
     }
   }
 
@@ -449,7 +449,7 @@ public final class TestSecureOzoneCluster {
   private void generateKeyPair(OzoneConfiguration config) throws Exception {
     HDDSKeyGenerator keyGenerator = new HDDSKeyGenerator(conf);
     keyPair = keyGenerator.generateKey();
-    HDDSKeyPEMWriter pemWriter = new HDDSKeyPEMWriter(config);
+    KeyCodec pemWriter = new KeyCodec(config);
     pemWriter.writeKey(keyPair, true);
   }
 
@@ -460,8 +460,6 @@ public final class TestSecureOzoneCluster {
    */
   @Test
   public void testDelegationTokenRenewal() throws Exception {
-    // Capture logs for assertions.
-    LogCapturer logs = LogCapturer.captureLogs(Server.AUDITLOG);
     GenericTestUtils
         .setLogLevel(LoggerFactory.getLogger(Server.class.getName()), INFO);
 
@@ -477,7 +475,6 @@ public final class TestSecureOzoneCluster {
     om.start();
 
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    String username = ugi.getUserName();
 
     // Get first OM client which will authenticate via Kerberos
     omClient = new OzoneManagerProtocolClientSideTranslatorPB(


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to