szetszwo commented on code in PR #9061:
URL: https://github.com/apache/ozone/pull/9061#discussion_r2383695129
##########
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java:
##########
@@ -50,24 +59,120 @@ public Class<KeyPrefixContainer> getTypeClass() {
return KeyPrefixContainer.class;
}
+ @Override
+ public boolean supportCodecBuffer() {
+ return true;
+ }
+
+ @Override
+ public CodecBuffer toCodecBuffer(@Nonnull KeyPrefixContainer object,
CodecBuffer.Allocator allocator) {
+ Preconditions.checkNotNull(object, "Null object can't be converted to
CodecBuffer.");
+
+ final byte[] keyPrefixBytes = object.getKeyPrefix().getBytes(UTF_8);
+ int totalSize = keyPrefixBytes.length;
+
+ if (object.getKeyVersion() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+ if (object.getContainerId() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+
+ final CodecBuffer buffer = allocator.apply(totalSize);
+ buffer.put(ByteBuffer.wrap(keyPrefixBytes));
+
+ if (object.getKeyVersion() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getKeyVersion());
+ }
+
+ if (object.getContainerId() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getContainerId());
+ }
+
+ return buffer;
+ }
+
+ @Override
+ public KeyPrefixContainer fromCodecBuffer(@Nonnull CodecBuffer buffer)
throws CodecException {
+
+ final ByteBuffer byteBuffer = buffer.asReadOnlyByteBuffer();
+ final int totalLength = byteBuffer.remaining();
+ final int startPosition = byteBuffer.position();
+ final int delimiterLength = KEY_DELIMITER_BYTES.length;
+
+ // We expect: keyPrefix + delimiter + version(8 bytes) + delimiter +
containerId(8 bytes)
+ final int minimumLength = delimiterLength + Long.BYTES + delimiterLength +
Long.BYTES;
+
+ if (totalLength < minimumLength) {
+ throw new CodecException("Buffer too small to contain all required
fields.");
+ }
+
+ int keyPrefixLength = totalLength - 2 * delimiterLength - 2 * Long.BYTES;
+ if (keyPrefixLength < 0) {
 Review Comment:
   This check is equivalent to `(totalLength < minimumLength)`, so it can be removed.
##########
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java:
##########
@@ -50,24 +59,120 @@ public Class<KeyPrefixContainer> getTypeClass() {
return KeyPrefixContainer.class;
}
+ @Override
+ public boolean supportCodecBuffer() {
+ return true;
+ }
+
+ @Override
+ public CodecBuffer toCodecBuffer(@Nonnull KeyPrefixContainer object,
CodecBuffer.Allocator allocator) {
+ Preconditions.checkNotNull(object, "Null object can't be converted to
CodecBuffer.");
+
+ final byte[] keyPrefixBytes = object.getKeyPrefix().getBytes(UTF_8);
+ int totalSize = keyPrefixBytes.length;
+
+ if (object.getKeyVersion() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+ if (object.getContainerId() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+
+ final CodecBuffer buffer = allocator.apply(totalSize);
+ buffer.put(ByteBuffer.wrap(keyPrefixBytes));
+
+ if (object.getKeyVersion() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getKeyVersion());
+ }
+
+ if (object.getContainerId() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getContainerId());
+ }
+
+ return buffer;
+ }
+
+ @Override
+ public KeyPrefixContainer fromCodecBuffer(@Nonnull CodecBuffer buffer)
throws CodecException {
+
+ final ByteBuffer byteBuffer = buffer.asReadOnlyByteBuffer();
+ final int totalLength = byteBuffer.remaining();
+ final int startPosition = byteBuffer.position();
+ final int delimiterLength = KEY_DELIMITER_BYTES.length;
+
+ // We expect: keyPrefix + delimiter + version(8 bytes) + delimiter +
containerId(8 bytes)
+ final int minimumLength = delimiterLength + Long.BYTES + delimiterLength +
Long.BYTES;
+
+ if (totalLength < minimumLength) {
+ throw new CodecException("Buffer too small to contain all required
fields.");
+ }
+
+ int keyPrefixLength = totalLength - 2 * delimiterLength - 2 * Long.BYTES;
+ if (keyPrefixLength < 0) {
+ throw new CodecException("Invalid buffer format: negative key prefix
length");
+ }
+
+ byteBuffer.position(startPosition);
+ byteBuffer.limit(startPosition + keyPrefixLength);
+ String keyPrefix = decodeStringFromBuffer(byteBuffer);
+ byteBuffer.limit(startPosition + totalLength);
+
+ byteBuffer.position(startPosition + keyPrefixLength);
+ for (int i = 0; i < delimiterLength; i++) {
+ if (byteBuffer.get() != KEY_DELIMITER_BYTES[i]) {
+ throw new CodecException("Expected delimiter after keyPrefix at
position " +
+ (startPosition + keyPrefixLength));
+ }
+ }
+ long version = byteBuffer.getLong();
+ for (int i = 0; i < delimiterLength; i++) {
+ if (byteBuffer.get() != KEY_DELIMITER_BYTES[i]) {
+ throw new CodecException("Expected delimiter after version at position
" +
+ (startPosition + keyPrefixLength + delimiterLength + Long.BYTES));
+ }
+ }
+ long containerId = byteBuffer.getLong();
+
+ return KeyPrefixContainer.get(keyPrefix, version, containerId);
+ }
+
+ private static String decodeStringFromBuffer(ByteBuffer buffer) {
+ if (buffer.remaining() == 0) {
+ return "";
+ }
+
+ final byte[] bytes;
+ if (buffer.hasArray()) {
+ int offset = buffer.arrayOffset() + buffer.position();
+ int length = buffer.remaining();
+ bytes = new byte[length];
+ System.arraycopy(buffer.array(), offset, bytes, 0, length);
 Review Comment:
   Since `String` is immutable, the `new String(..)` constructor already copies the
 array internally, so we don't need to copy it ourselves first.
##########
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/KeyPrefixContainerCodec.java:
##########
@@ -50,24 +59,120 @@ public Class<KeyPrefixContainer> getTypeClass() {
return KeyPrefixContainer.class;
}
+ @Override
+ public boolean supportCodecBuffer() {
+ return true;
+ }
+
+ @Override
+ public CodecBuffer toCodecBuffer(@Nonnull KeyPrefixContainer object,
CodecBuffer.Allocator allocator) {
+ Preconditions.checkNotNull(object, "Null object can't be converted to
CodecBuffer.");
+
+ final byte[] keyPrefixBytes = object.getKeyPrefix().getBytes(UTF_8);
+ int totalSize = keyPrefixBytes.length;
+
+ if (object.getKeyVersion() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+ if (object.getContainerId() != -1) {
+ totalSize += LONG_SERIALIZED_SIZE;
+ }
+
+ final CodecBuffer buffer = allocator.apply(totalSize);
+ buffer.put(ByteBuffer.wrap(keyPrefixBytes));
+
+ if (object.getKeyVersion() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getKeyVersion());
+ }
+
+ if (object.getContainerId() != -1) {
+ buffer.put(KEY_DELIMITER_BUFFER.duplicate());
+ buffer.putLong(object.getContainerId());
+ }
+
+ return buffer;
+ }
+
+ @Override
+ public KeyPrefixContainer fromCodecBuffer(@Nonnull CodecBuffer buffer)
throws CodecException {
+
+ final ByteBuffer byteBuffer = buffer.asReadOnlyByteBuffer();
+ final int totalLength = byteBuffer.remaining();
+ final int startPosition = byteBuffer.position();
Review Comment:
   If we call `slice()` first, `startPosition` becomes zero. I am fine with the
 current implementation.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]