This is an automated email from the ASF dual-hosted git repository.
wchevreuil pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new 67420e33f97 HBASE-29627 Handle any block cache fetching errors when reading a block in HFileReaderImpl (#7341)
67420e33f97 is described below
commit 67420e33f972478102a847c5673f233da1f071db
Author: Wellington Ramos Chevreuil <[email protected]>
AuthorDate: Thu Sep 25 11:32:23 2025 +0100
HBASE-29627 Handle any block cache fetching errors when reading a block in HFileReaderImpl (#7341)
Signed-off-by: Peter Somogyi <[email protected]>
---
.../hadoop/hbase/io/hfile/HFileReaderImpl.java | 26 ++++++++++++++++++++++
.../hadoop/hbase/io/hfile/TestHFileReaderImpl.java | 22 ++++++++++++++++++
2 files changed, 48 insertions(+)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 899a681131f..8f1bb3be7a5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -1172,6 +1172,32 @@ public abstract class HFileReaderImpl implements HFile.Reader, Configurable {
               }
               return cachedBlock;
             }
+          } catch (Exception e) {
+            if (cachedBlock != null) {
+              returnAndEvictBlock(cache, cacheKey, cachedBlock);
+            }
+            LOG.warn("Failed retrieving block from cache with key {}. "
+              + "\n Evicting this block from cache and will read it from file system. "
+              + "\n Exception details: ", cacheKey, e);
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("Further tracing details for failed block cache retrieval:"
+                + "\n Complete File path - {}," + "\n Expected Block Type - {}, Actual Block Type - {},"
+                + "\n Cache compressed - {}" + "\n Header size (after deserialized from cache) - {}"
+                + "\n Size with header - {}" + "\n Uncompressed size without header - {} "
+                + "\n Total byte buffer size - {}" + "\n Encoding code - {}", this.path,
+                expectedBlockType, (cachedBlock != null ? cachedBlock.getBlockType() : "N/A"),
+                (expectedBlockType != null
+                  ? cacheConf.shouldCacheCompressed(expectedBlockType.getCategory())
+                  : "N/A"),
+                (cachedBlock != null ? cachedBlock.headerSize() : "N/A"),
+                (cachedBlock != null ? cachedBlock.getOnDiskSizeWithHeader() : "N/A"),
+                (cachedBlock != null ? cachedBlock.getUncompressedSizeWithoutHeader() : "N/A"),
+                (cachedBlock != null ? cachedBlock.getBufferReadOnly().limit() : "N/A"),
+                (cachedBlock != null
+                  ? cachedBlock.getBufferReadOnly().getShort(cachedBlock.headerSize())
+                  : "N/A"));
+            }
+            return null;
           } finally {
             // Count bytes read as cached block is being returned
             if (isScanMetricsEnabled && cachedBlock != null) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
index c87897de818..6c84312cf59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
@@ -18,7 +18,12 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -116,6 +121,23 @@ public class TestHFileReaderImpl {
     }
   }
 
+  @Test
+  public void testReadWorksWhenCacheCorrupt() throws Exception {
+    BlockCache mockedCache = mock(BlockCache.class);
+    when(mockedCache.getBlock(any(), anyBoolean(), anyBoolean(), anyBoolean(), any()))
+      .thenThrow(new RuntimeException("Injected error"));
+    Path p = makeNewFile();
+    FileSystem fs = TEST_UTIL.getTestFileSystem();
+    Configuration conf = TEST_UTIL.getConfiguration();
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf, mockedCache), true, conf);
+    long offset = 0;
+    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
+      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null, false);
+      assertNotNull(block);
+      offset += block.getOnDiskSizeWithHeader();
+    }
+  }
+
   @Test
   public void testSeekBefore() throws Exception {
     Path p = makeNewFile();