This is an automated email from the ASF dual-hosted git repository.

szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/master by this push:
     new d8117f9f58 HDDS-8761. Support CodecBuffer for X509CertificateCodec. 
(#4831)
d8117f9f58 is described below

commit d8117f9f58f66044da9c3d04b6266e2061df4a1c
Author: Tsz-Wo Nicholas Sze <[email protected]>
AuthorDate: Wed Jun 7 07:12:39 2023 +0800

    HDDS-8761. Support CodecBuffer for X509CertificateCodec. (#4831)
---
 .../x509/certificate/utils/CertificateCodec.java   |  48 ++++++++--
 .../org/apache/hadoop/hdds/utils/db/Codec.java     |  12 ++-
 .../apache/hadoop/hdds/utils/db/CodecBuffer.java   |   2 +-
 .../hadoop/hdds/utils/db/StringCodecBase.java      |   8 +-
 .../hadoop/hdds/utils/io/LengthOutputStream.java   |  55 ++++++++++++
 .../apache/hadoop/hdds/utils/io/package-info.java  |  22 +++++
 .../apache/hadoop/hdds/utils/db/CodecTestUtil.java | 100 +++++++++++++++++++++
 .../org/apache/hadoop/hdds/utils/db/TestCodec.java |  54 +----------
 .../hdds/scm/metadata/X509CertificateCodec.java    |  51 ++++++++---
 .../OldX509CertificateCodecForTesting.java}        |  14 +--
 .../hdds/scm/metadata/TestPipelineIDCodec.java     |   3 +
 .../scm/metadata/TestX509CertificateCodec.java     |  70 +++++++++++++++
 12 files changed, 360 insertions(+), 79 deletions(-)

diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
index 75e69fcfe0..d02cc8d1a2 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/utils/CertificateCodec.java
@@ -35,7 +35,10 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.io.StringWriter;
+import java.io.Writer;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
@@ -133,6 +136,32 @@ public class CertificateCodec {
     }
   }
 
+  /**
+   * Encode the given certificate in PEM
+   * and then write it out to the given {@link OutputStream}.
+   *
+   * @param <OUT> The output type.
+   */
+  public static <OUT extends OutputStream> OUT writePEMEncoded(
+      X509Certificate certificate, OUT out) throws IOException {
+    writePEMEncoded(certificate, new OutputStreamWriter(out, DEFAULT_CHARSET));
+    return out;
+  }
+
+  /**
+   * Encode the given certificate in PEM
+   * and then write it out to the given {@link Writer}.
+   *
+   * @param <W> The writer type.
+   */
+  public static <W extends Writer> W writePEMEncoded(
+      X509Certificate certificate, W writer) throws IOException {
+    try (JcaPEMWriter pemWriter = new JcaPEMWriter(writer)) {
+      pemWriter.writeObject(certificate);
+    }
+    return writer;
+  }
+
   /**
    * Returns the Certificate as a PEM encoded String.
    *
@@ -143,11 +172,7 @@ public class CertificateCodec {
   public static String getPEMEncodedString(X509Certificate certificate)
       throws SCMSecurityException {
     try {
-      StringWriter stringWriter = new StringWriter();
-      try (JcaPEMWriter pemWriter = new JcaPEMWriter(stringWriter)) {
-        pemWriter.writeObject(certificate);
-      }
-      return stringWriter.toString();
+      return writePEMEncoded(certificate, new StringWriter()).toString();
     } catch (IOException e) {
       LOG.error("Error in encoding certificate." + certificate
           .getSubjectDN().toString(), e);
@@ -173,10 +198,16 @@ public class CertificateCodec {
   public static <E extends Exception> X509Certificate getX509Certificate(
       String pemEncoded, Function<CertificateException, E> convertor)
       throws E {
-    CertificateFactory fact = getCertFactory();
     // ByteArrayInputStream.close(), which is a noop, can be safely ignored.
     final ByteArrayInputStream input = new ByteArrayInputStream(
         pemEncoded.getBytes(DEFAULT_CHARSET));
+    return readX509Certificate(input, convertor);
+  }
+
+  private static <E extends Exception> X509Certificate readX509Certificate(
+      InputStream input, Function<CertificateException, E> convertor)
+      throws E {
+    final CertificateFactory fact = getCertFactory();
     try {
       return (X509Certificate) fact.engineGenerateCertificate(input);
     } catch (CertificateException e) {
@@ -184,6 +215,11 @@ public class CertificateCodec {
     }
   }
 
+  public static X509Certificate readX509Certificate(InputStream input)
+      throws IOException {
+    return readX509Certificate(input, CertificateCodec::toIOException);
+  }
+
   public static IOException toIOException(CertificateException e) {
     return new IOException("Failed to engineGenerateCertificate", e);
   }
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
index ad815e1e49..7e7254f843 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
@@ -14,7 +14,6 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
- *
  */
 package org.apache.hadoop.hdds.utils.db;
 
@@ -75,6 +74,17 @@ public interface Codec<T> {
     return toCodecBuffer(object, CodecBuffer::allocateDirect);
   }
 
+  /**
+   * Serialize the given object to bytes.
+   *
+   * @param object The object to be serialized.
+   * @return a heap buffer storing the serialized bytes.
+   */
+  default CodecBuffer toHeapCodecBuffer(@Nonnull T object)
+      throws IOException {
+    return toCodecBuffer(object, CodecBuffer::allocateHeap);
+  }
+
   /**
    * Deserialize an object from the given buffer.
    *
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
index c8550d59c7..abb0f79bab 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecBuffer.java
@@ -280,7 +280,7 @@ public final class CodecBuffer implements AutoCloseable {
    * @return this object.
    * @throws IOException in case the source throws an {@link IOException}.
    */
-  CodecBuffer put(
+  public CodecBuffer put(
       CheckedFunction<OutputStream, Integer, IOException> source)
       throws IOException {
     assertRefCnt(1);
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
index 799b7a9381..f9db36d889 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodecBase.java
@@ -33,6 +33,7 @@ import java.nio.charset.CharsetDecoder;
 import java.nio.charset.CharsetEncoder;
 import java.nio.charset.CoderResult;
 import java.nio.charset.CodingErrorAction;
+import java.util.Objects;
 import java.util.function.Function;
 import java.util.function.IntFunction;
 
@@ -147,7 +148,9 @@ abstract class StringCodecBase implements Codec<String> {
       // and then wrap it as a buffer for encoding.
       final byte[] array = new byte[serializedSize];
       final Integer encoded = encoder.apply(ByteBuffer.wrap(array));
-      Preconditions.assertSame(serializedSize, encoded, "serializedSize");
+      Objects.requireNonNull(encoded, "encoded == null");
+      Preconditions.assertSame(serializedSize.intValue(), encoded.intValue(),
+          "serializedSize");
       return array;
     } else {
       // When the serialized size is unknown, allocate a larger buffer
@@ -155,8 +158,7 @@ abstract class StringCodecBase implements Codec<String> {
       try (CodecBuffer buffer = CodecBuffer.allocateHeap(upperBound)) {
         buffer.putFromSource(encoder);
 
-        // require a buffer copying
-        // unless upperBound equals to the serialized size.
+        // copy the buffer to an array in order to release the buffer.
         return buffer.getArray();
       }
     }
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/LengthOutputStream.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/LengthOutputStream.java
new file mode 100644
index 0000000000..3f8fcd9f56
--- /dev/null
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/LengthOutputStream.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.utils.io;
+
+import javax.annotation.Nonnull;
+import java.io.FilterOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * A {@link FilterOutputStream} that records the number of bytes written.
+ */
+public class LengthOutputStream extends FilterOutputStream {
+  private int length;
+
+  /**
+   * Create a {@link FilterOutputStream}
+   * which writes to the given underlying {@link OutputStream}.
+   */
+  public LengthOutputStream(OutputStream out) {
+    super(out);
+  }
+
+  /** @return the length. */
+  public int getLength() {
+    return length;
+  }
+
+  @Override
+  public void write(int b) throws IOException {
+    out.write(b);
+    length++;
+  }
+
+  @Override
+  public void write(@Nonnull byte[] b, int off, int len) throws IOException {
+    out.write(b, off, len);
+    length += len;
+  }
+}
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/package-info.java
 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/package-info.java
new file mode 100644
index 0000000000..d7f06566f6
--- /dev/null
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/io/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains I/O related utilities.
+ */
+package org.apache.hadoop.hdds.utils.io;
diff --git 
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
new file mode 100644
index 0000000000..785737ff57
--- /dev/null
+++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/CodecTestUtil.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.utils.db;
+
+import org.junit.jupiter.api.Assertions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+
+/**
+ * Test {@link Codec} implementations.
+ */
+public final class CodecTestUtil {
+  static final Logger LOG = LoggerFactory.getLogger(CodecTestUtil.class);
+
+  private CodecTestUtil() {
+  }
+
+  /**
+   * Force gc to check leakage.
+   */
+  static void gc() throws InterruptedException {
+    // use WeakReference to detect gc
+    Object obj = new Object();
+    final WeakReference<Object> weakRef = new WeakReference<>(obj);
+    obj = null;
+
+    // loop until gc has completed.
+    for (int i = 0; weakRef.get() != null; i++) {
+      LOG.info("gc {}", i);
+      System.gc();
+      Thread.sleep(100);
+    }
+    CodecBuffer.assertNoLeaks();
+  }
+
+  public static <T> void runTest(Codec<T> codec, T original,
+      Integer serializedSize, Codec<T> oldCodec) throws Exception {
+    Assertions.assertTrue(codec.supportCodecBuffer());
+
+    // serialize to byte[]
+    final byte[] array = codec.toPersistedFormat(original);
+    LOG.info("encoded length = " + array.length);
+    if (serializedSize != null) {
+      Assertions.assertEquals(serializedSize, array.length);
+    }
+    if (oldCodec != null) {
+      final byte[] expected = oldCodec.toPersistedFormat(original);
+      Assertions.assertArrayEquals(expected, array);
+    }
+    // deserialize from byte[]
+    final T fromArray = codec.fromPersistedFormat(array);
+    Assertions.assertEquals(original, fromArray);
+
+    // serialize to CodecBuffer
+    final CodecBuffer codecBuffer = codec.toCodecBuffer(
+        original, CodecBuffer::allocateHeap);
+    Assertions.assertEquals(array.length, codecBuffer.readableBytes());
+    final ByteBuffer byteBuffer = codecBuffer.asReadOnlyByteBuffer();
+    Assertions.assertEquals(array.length, byteBuffer.remaining());
+    for (int i = 0; i < array.length; i++) {
+      // assert exact content
+      Assertions.assertEquals(array[i], byteBuffer.get(i));
+    }
+    if (oldCodec != null && oldCodec.supportCodecBuffer()) {
+      try (CodecBuffer expected = oldCodec.toHeapCodecBuffer(original)) {
+        Assertions.assertEquals(expected.asReadOnlyByteBuffer(),
+            codecBuffer.asReadOnlyByteBuffer());
+      }
+    }
+
+    // deserialize from CodecBuffer
+    final T fromBuffer = codec.fromCodecBuffer(codecBuffer);
+    codecBuffer.release();
+    Assertions.assertEquals(original, fromBuffer);
+
+    // deserialize from wrapped buffer
+    final CodecBuffer wrapped = CodecBuffer.wrap(array);
+    final T fromWrappedArray = codec.fromCodecBuffer(wrapped);
+    wrapped.release();
+    Assertions.assertEquals(original, fromWrappedArray);
+  }
+}
diff --git 
a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestCodec.java
 
b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestCodec.java
index 7060a30daa..cdcef15a86 100644
--- 
a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestCodec.java
+++ 
b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestCodec.java
@@ -29,13 +29,12 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdds.utils.db.RDBBatchOperation.Bytes;
 
 import java.io.IOException;
-import java.lang.ref.WeakReference;
-import java.nio.ByteBuffer;
 import java.util.UUID;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.function.Consumer;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.hadoop.hdds.utils.db.CodecTestUtil.gc;
 
 /**
  * Test {@link Codec} implementations.
@@ -44,22 +43,6 @@ public final class TestCodec {
   static final Logger LOG = LoggerFactory.getLogger(TestCodec.class);
   static final int NUM_LOOPS = 10;
 
-  /** Force gc to check leakage. */
-  static void gc() throws InterruptedException {
-    // use WeakReference to detect gc
-    Object obj = new Object();
-    final WeakReference<Object> weakRef = new WeakReference<>(obj);
-    obj = null;
-
-    // loop until gc has completed.
-    for (int i = 0; weakRef.get() != null; i++) {
-      LOG.info("gc {}", i);
-      System.gc();
-      Thread.sleep(100);
-    }
-    CodecBuffer.assertNoLeaks();
-  }
-
   @Test
   public void testShortCodec() throws Exception {
     runTestShortCodec((short)0);
@@ -251,40 +234,9 @@ public final class TestCodec {
     gc();
   }
 
-  static <T> void runTest(Codec<T> codec, T original,
+  public static <T> void runTest(Codec<T> codec, T original,
       Integer serializedSize) throws Exception {
-    Assertions.assertTrue(codec.supportCodecBuffer());
-
-    // serialize to byte[]
-    final byte[] array = codec.toPersistedFormat(original);
-    if (serializedSize != null) {
-      Assertions.assertEquals(serializedSize, array.length);
-    }
-    // deserialize from byte[]
-    final T fromArray = codec.fromPersistedFormat(array);
-    Assertions.assertEquals(original, fromArray);
-
-    // serialize to CodecBuffer
-    final CodecBuffer codecBuffer = codec.toCodecBuffer(
-        original, CodecBuffer::allocateHeap);
-    final ByteBuffer byteBuffer = codecBuffer.asReadOnlyByteBuffer();
-    Assertions.assertEquals(array.length, byteBuffer.remaining());
-    for (int i = 0; i < array.length; i++) {
-      // assert exact content
-      Assertions.assertEquals(array[i], byteBuffer.get(i));
-    }
-
-    // deserialize from CodecBuffer
-    final T fromBuffer = codec.fromCodecBuffer(codecBuffer);
-    codecBuffer.release();
-    Assertions.assertEquals(original, fromBuffer);
-
-    // deserialize from wrapped buffer
-    final CodecBuffer wrapped = CodecBuffer.wrap(array);
-    final T fromWrappedArray = codec.fromCodecBuffer(wrapped);
-    wrapped.release();
-    Assertions.assertEquals(original, fromWrappedArray);
-
+    CodecTestUtil.runTest(codec, original, serializedSize, null);
     runTestBytes(original, codec);
   }
 
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
index 56a0165c22..c1a96816be 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
@@ -19,18 +19,27 @@
 
 package org.apache.hadoop.hdds.scm.metadata;
 
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.security.cert.CertificateException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.security.cert.X509Certificate;
+import java.util.function.IntFunction;
+
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecBuffer;
+import org.apache.hadoop.hdds.utils.io.LengthOutputStream;
+import org.apache.ratis.util.function.CheckedFunction;
+
+import javax.annotation.Nonnull;
 
 /**
  * Codec to serialize/deserialize {@link X509Certificate}.
  */
 public final class X509CertificateCodec implements Codec<X509Certificate> {
+  private static final int INITIAL_CAPACITY = 4 << 10; // 4 KB
 
   private static final Codec<X509Certificate> INSTANCE =
       new X509CertificateCodec();
@@ -43,11 +52,35 @@ public final class X509CertificateCodec implements 
Codec<X509Certificate> {
     // singleton
   }
 
+  @Override
+  public boolean supportCodecBuffer() {
+    return true;
+  }
+
+  CheckedFunction<OutputStream, Integer, IOException> writeTo(
+      X509Certificate object) {
+    return out -> CertificateCodec.writePEMEncoded(object,
+        new LengthOutputStream(out)).getLength();
+  }
+
+  @Override
+  public CodecBuffer toCodecBuffer(@Nonnull X509Certificate object,
+      IntFunction<CodecBuffer> allocator) throws IOException {
+    return allocator.apply(-INITIAL_CAPACITY).put(writeTo(object));
+  }
+
+  @Override
+  public X509Certificate fromCodecBuffer(@Nonnull CodecBuffer buffer)
+      throws IOException {
+    try (InputStream in = buffer.getInputStream()) {
+      return CertificateCodec.readX509Certificate(in);
+    }
+  }
+
   @Override
   public byte[] toPersistedFormat(X509Certificate object) throws IOException {
-    try {
-      return CertificateCodec.getPEMEncodedString(object)
-          .getBytes(StandardCharsets.UTF_8);
+    try (CodecBuffer buffer = toHeapCodecBuffer(object)) {
+      return buffer.getArray();
     } catch (SCMSecurityException exp) {
       throw new IOException(exp);
     }
@@ -56,12 +89,8 @@ public final class X509CertificateCodec implements 
Codec<X509Certificate> {
   @Override
   public X509Certificate fromPersistedFormat(byte[] rawData)
       throws IOException {
-    try {
-      String s = new String(rawData, StandardCharsets.UTF_8);
-      return CertificateCodec.getX509Certificate(s);
-    } catch (CertificateException exp) {
-      throw new IOException(exp);
-    }
+    return CertificateCodec.readX509Certificate(
+        new ByteArrayInputStream(rawData));
   }
 
   @Override
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
similarity index 91%
copy from 
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
copy to 
hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
index 56a0165c22..67593dc777 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
+++ 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/OldX509CertificateCodecForTesting.java
@@ -19,27 +19,29 @@
 
 package org.apache.hadoop.hdds.scm.metadata;
 
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
+import org.apache.hadoop.hdds.utils.db.Codec;
+
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
-import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Codec to serialize/deserialize {@link X509Certificate}.
  */
-public final class X509CertificateCodec implements Codec<X509Certificate> {
+public final class OldX509CertificateCodecForTesting
+    implements Codec<X509Certificate> {
 
   private static final Codec<X509Certificate> INSTANCE =
-      new X509CertificateCodec();
+      new OldX509CertificateCodecForTesting();
 
   public static Codec<X509Certificate> get() {
     return INSTANCE;
   }
 
-  private X509CertificateCodec() {
+  private OldX509CertificateCodecForTesting() {
     // singleton
   }
 
diff --git 
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java
 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java
index 83c084232c..25f1fd3716 100644
--- 
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java
+++ 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java
@@ -23,6 +23,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
 import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecTestUtil;
 import org.junit.jupiter.api.Test;
 
 import java.util.UUID;
@@ -126,6 +127,8 @@ public class TestPipelineIDCodec {
 
     assertEquals(pid, oldCodec.fromPersistedFormat(expected));
     assertEquals(pid, newCodec.fromPersistedFormat(expected));
+
+    CodecTestUtil.runTest(newCodec, pid, 16, oldCodec);
   }
 
   private void checkPersisting(
diff --git 
a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestX509CertificateCodec.java
 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestX509CertificateCodec.java
new file mode 100644
index 0000000000..035138fb2b
--- /dev/null
+++ 
b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestX509CertificateCodec.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+package org.apache.hadoop.hdds.scm.metadata;
+
+import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
+import org.apache.hadoop.hdds.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.CodecTestUtil;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.X509Certificate;
+import java.util.concurrent.ThreadLocalRandom;
+
+
+/**
+ * Class to test {@link X509CertificateCodec}.
+ */
+public class TestX509CertificateCodec {
+  static final Logger LOG = LoggerFactory.getLogger(
+      TestX509CertificateCodec.class);
+
+  private final Codec<X509Certificate> oldCodec
+      = OldX509CertificateCodecForTesting.get();
+  private final Codec<X509Certificate> newCodec
+      = X509CertificateCodec.get();
+
+  public static KeyPair genKeyPair(String algorithm, int keySize)
+      throws NoSuchAlgorithmException {
+    LOG.info("genKeyPair: {}, keySize={}", algorithm, keySize);
+    final KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
+    keyGen.initialize(keySize);
+    return keyGen.genKeyPair();
+  }
+
+  @Test
+  public void testRSA() throws Exception {
+    for (int n = 512; n <= 4096; n <<= 1) {
+      runTestRSA(n);
+    }
+  }
+
+  public void runTestRSA(int keySize) throws Exception {
+    final KeyPair rsa = genKeyPair("RSA", keySize);
+    final int days = ThreadLocalRandom.current().nextInt(100) + 1;
+    final X509Certificate x509 = KeyStoreTestUtil.generateCertificate(
+        "CN=testRSA" + keySize, rsa, days, "SHA256withRSA");
+    System.out.println(CertificateCodec.getPEMEncodedString(x509));
+    CodecTestUtil.runTest(newCodec, x509, null, oldCodec);
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to