HBASE-16644 Errors when reading legit HFile Trailer of old (v2.0) format file


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5ae516bd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5ae516bd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5ae516bd

Branch: refs/heads/hbase-12439
Commit: 5ae516bd632afd8de6cf113235365877525c1243
Parents: 0daeb63
Author: Mikhail Antonov <anto...@apache.org>
Authored: Tue Oct 4 21:10:42 2016 -0700
Committer: Mikhail Antonov <anto...@apache.org>
Committed: Tue Oct 4 21:10:42 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/HFileBlock.java       | 24 ++++++++++++--------
 1 file changed, 14 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
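For context on the diff below: an HFile block header's size depends on the
file format. Pre-checksum (v2.0) headers lack the three checksum fields, so
code that unconditionally reads those fields, or adds the checksum-era
header size, misreads legitimate old files. A minimal sketch of that layout
difference (hypothetical class and constants, assumed from the block header
layout; not the patched code itself):

  // Hypothetical illustration, not HBase code.
  class HeaderSizes {
    // v2.0 header: magic(8) + onDiskSizeWithoutHeader(4)
    //            + uncompressedSizeWithoutHeader(4) + prevBlockOffset(8) = 24
    static final int NO_CHECKSUM = 8 + 4 + 4 + 8;
    // Checksum-era header appends checksumType(1) + bytesPerChecksum(4)
    //                           + onDiskDataSizeWithHeader(4) = 33
    static final int WITH_CHECKSUM = NO_CHECKSUM + 1 + 4 + 4;

    static int headerSize(boolean usesHBaseChecksum) {
      return usesHBaseChecksum ? WITH_CHECKSUM : NO_CHECKSUM;
    }
  }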


http://git-wip-us.apache.org/repos/asf/hbase/blob/5ae516bd/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 9d2ccb2..13b501a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -371,15 +371,16 @@ public class HFileBlock implements Cacheable {
     final int uncompressedSizeWithoutHeader =
         buf.getInt(Header.UNCOMPRESSED_SIZE_WITHOUT_HEADER_INDEX);
     final long prevBlockOffset = buf.getLong(Header.PREV_BLOCK_OFFSET_INDEX);
-    byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);
-    int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);
-    int onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
     // This constructor is called when we deserialize a block from cache and when we read a block in
     // from the fs. fileCache is null when deserialized from cache so need to make up one.
     HFileContextBuilder fileContextBuilder = fileContext != null?
         new HFileContextBuilder(fileContext): new HFileContextBuilder();
     fileContextBuilder.withHBaseCheckSum(usesHBaseChecksum);
+    int onDiskDataSizeWithHeader;
     if (usesHBaseChecksum) {
+      byte checksumType = buf.get(Header.CHECKSUM_TYPE_INDEX);
+      int bytesPerChecksum = buf.getInt(Header.BYTES_PER_CHECKSUM_INDEX);
+      onDiskDataSizeWithHeader = buf.getInt(Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
       // Use the checksum type and bytes per checksum from header, not from filecontext.
       fileContextBuilder.withChecksumType(ChecksumType.codeToType(checksumType));
       fileContextBuilder.withBytesPerCheckSum(bytesPerChecksum);
@@ -419,11 +420,12 @@ public class HFileBlock implements Cacheable {
   /**
    * Parse total ondisk size including header and checksum.
    * @param headerBuf Header ByteBuffer. Presumed exact size of header.
+   * @param verifyChecksum true if checksum verification is in use.
    * @return Size of the block with header included.
    */
-  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf) {
-    // Set hbase checksum to true always calling headerSize.
-    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) + headerSize(true);
+  private static int getOnDiskSizeWithHeader(final ByteBuffer headerBuf, boolean verifyChecksum) {
+    return headerBuf.getInt(Header.ON_DISK_SIZE_WITHOUT_HEADER_INDEX) +
+      headerSize(verifyChecksum);
   }
 
   /**
@@ -1659,10 +1661,10 @@ public class HFileBlock implements Cacheable {
      * @throws IOException
      */
     private void verifyOnDiskSizeMatchesHeader(final int passedIn, final ByteBuffer headerBuf,
-        final long offset)
+        final long offset, boolean verifyChecksum)
     throws IOException {
       // Assert size provided aligns with what is in the header
-      int fromHeader = getOnDiskSizeWithHeader(headerBuf);
+      int fromHeader = getOnDiskSizeWithHeader(headerBuf, verifyChecksum);
       if (passedIn != fromHeader) {
         throw new IOException("Passed in onDiskSizeWithHeader=" + passedIn + " != " + fromHeader +
             ", offset=" + offset + ", fileContext=" + this.fileContext);
@@ -1703,7 +1705,8 @@ public class HFileBlock implements Cacheable {
          readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(), hdrSize, false,
               offset, pread);
         }
-        onDiskSizeWithHeader = getOnDiskSizeWithHeader(headerBuf);
+        onDiskSizeWithHeader = getOnDiskSizeWithHeader(headerBuf,
+          this.fileContext.isUseHBaseChecksum());
       }
       int preReadHeaderSize = headerBuf == null? 0 : hdrSize;
       // Allocate enough space to fit the next block's header too; saves a seek next time through.
@@ -1722,7 +1725,8 @@ public class HFileBlock implements Cacheable {
       }
       // Do a few checks before we go instantiate HFileBlock.
       assert onDiskSizeWithHeader > this.hdrSize;
-      verifyOnDiskSizeMatchesHeader(onDiskSizeWithHeader, headerBuf, offset);
+      verifyOnDiskSizeMatchesHeader(onDiskSizeWithHeader, headerBuf, offset,
+        this.fileContext.isUseHBaseChecksum());
       ByteBuffer onDiskBlockByteBuffer = ByteBuffer.wrap(onDiskBlock, 0, onDiskSizeWithHeader);
       // Verify checksum of the data before using it for building HFileBlock.
       if (verifyChecksum &&
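
To make the failure concrete (a worked example with made-up sizes, reusing
the HeaderSizes sketch above): take a v2.0 block whose header records
onDiskSizeWithoutHeader = 1000, read by a caller that passes the true size.

  int passedIn   = 1000 + HeaderSizes.headerSize(false); // 1024, correct for v2.0
  int fromHeader = 1000 + HeaderSizes.headerSize(true);  // 1033, the old unconditional math
  // verifyOnDiskSizeMatchesHeader(1024, ...) compared 1024 against 1033 and
  // threw "Passed in onDiskSizeWithHeader=1024 != 1033" on a valid old file.
  // With the flag threaded through getOnDiskSizeWithHeader, both sides use
  // headerSize(false) and the check passes.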
