This is an automated email from the ASF dual-hosted git repository.
andor pushed a commit to branch HBASE-29081
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/HBASE-29081 by this push:
     new 1a0434dfa82 HBASE-29579: AssignmentManager is trying to pick up the other cluster's meta table after HbckChore run (#7304)
1a0434dfa82 is described below
commit 1a0434dfa825a347ce7c9ed6e26e1f28c978474e
Author: Kota-SH <[email protected]>
AuthorDate: Fri Sep 19 11:39:59 2025 -0500
    HBASE-29579: AssignmentManager is trying to pick up the other cluster's meta table after HbckChore run (#7304)
---
.../org/apache/hadoop/hbase/master/HMaster.java | 7 ++++
.../apache/hadoop/hbase/master/hbck/HbckChore.java | 3 +-
.../hadoop/hbase/util/FSTableDescriptors.java | 3 +-
.../java/org/apache/hadoop/hbase/util/FSUtils.java | 19 ++++++++++
.../hadoop/hbase/master/TestMasterMetrics.java | 33 +++++++++++++++++
.../hbase/master/assignment/TestHbckChore.java | 43 ++++++++++++++++++++++
.../hadoop/hbase/util/TestFSTableDescriptors.java | 33 +++++++++++++++++
7 files changed, 139 insertions(+), 2 deletions(-)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 7a352b4c016..757699ccccb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -3117,6 +3117,13 @@ public class HMaster extends HBaseServerBase<MasterRpcServices> implements Maste
       Map<String, TableDescriptor> tableDescriptorMap = getTableDescriptors().getAll();
       for (TableDescriptor tableDescriptor : tableDescriptorMap.values()) {
         TableName tableName = tableDescriptor.getTableName();
+        if (
+          tableName.isSystemTable() && tableName.getQualifierAsString().startsWith("meta")
+            && !tableName.equals(TableName.META_TABLE_NAME)
+        ) {
+          LOG.info("Skipping foreign meta table {} in cluster metrics", tableName);
+          continue;
+        }
         RegionStatesCount regionStatesCount = assignmentManager.getRegionStatesCount(tableName);
         tableRegionStatesCountMap.put(tableName, regionStatesCount);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/hbck/HbckChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/hbck/HbckChore.java
index 75df2da5a71..34dc1ea81e2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/hbck/HbckChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/hbck/HbckChore.java
@@ -261,7 +261,8 @@ public class HbckChore extends ScheduledChore {
FileSystem fs = master.getMasterFileSystem().getFileSystem();
int numRegions = 0;
-    List<Path> tableDirs = FSUtils.getTableDirs(fs, rootDir);
+    List<Path> tableDirs =
+      FSUtils.getTableDirs(fs, rootDir).stream().filter(FSUtils::isLocalMetaTable).toList();
for (Path tableDir : tableDirs) {
List<Path> regionDirs = FSUtils.getRegionDirs(fs, tableDir);
for (Path regionDir : regionDirs) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index b32fad50f0f..f34991279d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -270,7 +270,8 @@ public class FSTableDescriptors implements TableDescriptors {
LOG.info("Fetching table descriptors from the filesystem.");
final long startTime = EnvironmentEdgeManager.currentTime();
AtomicBoolean allvisited = new AtomicBoolean(usecache);
-    List<Path> tableDirs = FSUtils.getTableDirs(fs, rootdir);
+    List<Path> tableDirs =
+      FSUtils.getTableDirs(fs, rootdir).stream().filter(FSUtils::isLocalMetaTable).toList();
if (!tableDescriptorParallelLoadEnable) {
for (Path dir : tableDirs) {
internalGet(dir, tds, allvisited);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 98e1afc4d97..06ea8a9d52b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -1046,6 +1046,25 @@ public final class FSUtils {
return tabledirs;
}
+  /**
+   * A filter to exclude meta tables belonging to foreign clusters. This is essential in a
+   * read-replica setup where multiple clusters share the same fs.
+   * @param tablePath The Path to the table directory.
+   * @return {@code true} if the path is a regular table or the cluster's own meta table.
+   *         {@code false} if it is a meta table belonging to a different cluster.
+   */
+  public static boolean isLocalMetaTable(Path tablePath) {
+    if (tablePath == null) {
+      return false;
+    }
+    String dirName = tablePath.getName();
+    if (dirName.startsWith(TableName.META_TABLE_NAME.getQualifierAsString())) {
+      return TableName.valueOf(TableName.META_TABLE_NAME.getNamespaceAsString(), dirName)
+        .equals(TableName.META_TABLE_NAME);
+    }
+    return true;
+  }
+
/**
* Filter for all dirs that don't start with '.'
*/
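For context on the filter added above: isLocalMetaTable() keeps every regular table directory plus the cluster's own hbase:meta directory, and drops sibling directories such as "meta_<suffix>" that another cluster in a read-replica setup may have written under the shared root. The snippet below is a minimal, self-contained sketch of that decision logic on plain directory names; the class LocalMetaFilterSketch, the helper looksLikeLocalMeta, and the sample names are illustrative assumptions and are not part of this patch.

    // Illustrative sketch only: mirrors the isLocalMetaTable() decision on plain
    // strings, without the Hadoop Path / HBase TableName dependencies.
    import java.util.List;

    public class LocalMetaFilterSketch {

      // Keep a directory unless it starts with "meta" but is not exactly "meta",
      // i.e. it looks like another cluster's meta table (e.g. "meta_replica1").
      static boolean looksLikeLocalMeta(String tableDirName) {
        if (tableDirName.startsWith("meta")) {
          return tableDirName.equals("meta");
        }
        return true; // regular table directories are always kept
      }

      public static void main(String[] args) {
        for (String dir : List.of("meta", "meta_replica1", "namespace", "usertable")) {
          System.out.println(dir + " -> " + (looksLikeLocalMeta(dir) ? "kept" : "skipped"));
        }
        // prints:
        // meta -> kept
        // meta_replica1 -> skipped
        // namespace -> kept
        // usertable -> kept
      }
    }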
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
index 09618b3d899..07dacf6f35c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java
@@ -17,10 +17,16 @@
*/
package org.apache.hadoop.hbase.master;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.util.EnumSet;
import java.util.HashMap;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.CompatibilityFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -28,7 +34,12 @@ import org.apache.hadoop.hbase.ServerMetricsBuilder;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.StartTestingClusterOption;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.YouAreDeadException;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.RegionStatesCount;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -192,4 +203,26 @@ public class TestMasterMetrics {
     MetricsMasterProcSource masterSource = master.getMasterMetrics().getMetricsProcSource();
     metricsHelper.assertGauge("numMasterWALs", master.getNumWALFiles(), masterSource);
   }
+
+  @Test
+  public void testClusterMetricsMetaTableSkipping() throws Exception {
+    TableName replicaMetaTable = TableName.valueOf("hbase", "meta_replica");
+    TableDescriptor replicaMetaDescriptor = TableDescriptorBuilder.newBuilder(replicaMetaTable)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info")).build();
+    master.getTableDescriptors().update(replicaMetaDescriptor, true);
+    try {
+      ClusterMetrics metrics = master.getClusterMetricsWithoutCoprocessor(
+        EnumSet.of(ClusterMetrics.Option.TABLE_TO_REGIONS_COUNT));
+      Map<TableName, RegionStatesCount> tableRegionStatesCount =
+        metrics.getTableRegionStatesCount();
+
+      assertFalse("Foreign meta table should not be present",
+        tableRegionStatesCount.containsKey(replicaMetaTable));
+      assertTrue("Local meta should be present",
+        tableRegionStatesCount.containsKey(TableName.META_TABLE_NAME));
+
+    } finally {
+      master.getTableDescriptors().remove(replicaMetaTable);
+    }
+  }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestHbckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestHbckChore.java
index 70afeae4c6e..4282f4e38f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestHbckChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestHbckChore.java
@@ -31,20 +31,28 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.TableStateManager;
import org.apache.hadoop.hbase.master.hbck.HbckChore;
import org.apache.hadoop.hbase.master.hbck.HbckReport;
+import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Before;
@@ -247,4 +255,39 @@ public class TestHbckChore extends TestAssignmentManagerBase {
hbckChoreWithChangedConf.choreForTesting();
assertNull(hbckChoreWithChangedConf.getLastReport());
}
+
+  @Test
+  public void testChoreSkipsForeignMetaTables() throws Exception {
+    FileSystem fs = master.getMasterFileSystem().getFileSystem();
+    Path rootDir = master.getMasterFileSystem().getRootDir();
+    String[] metaTables = { "meta_replica1", "meta" };
+    Path hbaseNamespaceDir = new Path(rootDir, HConstants.BASE_NAMESPACE_DIR + "/hbase");
+    fs.mkdirs(hbaseNamespaceDir);
+
+    for (String metaTable : metaTables) {
+      TableName tableName = TableName.valueOf("hbase", metaTable);
+      Path metaTableDir = new Path(hbaseNamespaceDir, metaTable);
+      fs.mkdirs(metaTableDir);
+      fs.mkdirs(new Path(metaTableDir, FSTableDescriptors.TABLEINFO_DIR));
+      fs.mkdirs(new Path(metaTableDir, "abcdef0123456789"));
+
+      TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY)
+          .setMaxVersions(HConstants.DEFAULT_HBASE_META_VERSIONS).setInMemory(true)
+          .setBlocksize(HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)
+          .setBloomFilterType(BloomType.ROWCOL).build())
+        .build();
+
+      Path tableDir = CommonFSUtils.getTableDir(rootDir, tableName);
+      FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, tableDescriptor,
+        false);
+    }
+
+    assertTrue("HbckChore should run successfully", hbckChore.runChore());
+    HbckReport report = hbckChore.getLastReport();
+    assertNotNull("HbckReport should not be null", report);
+    boolean hasForeignMetaOrphan = report.getOrphanRegionsOnFS().values().stream()
+      .anyMatch(path -> path.toString().contains("meta_replica1"));
+    assertFalse("HbckChore should not report foreign meta tables as orphans", hasForeignMetaOrphan);
+  }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
index 5e2b4b52950..6cf83fba8a0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.AfterClass;
@@ -483,6 +484,38 @@ public class TestFSTableDescriptors {
assertFalse(fs.exists(brokenFile));
}
+  @Test
+  public void testFSTableDescriptorsSkipsForeignMetaTables() throws Exception {
+    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+    String[] metaTables = { "meta_replica1", "meta" };
+    Path hbaseNamespaceDir = new Path(testDir, HConstants.BASE_NAMESPACE_DIR + "/hbase");
+    fs.mkdirs(hbaseNamespaceDir);
+
+    for (String metaTable : metaTables) {
+      TableName tableName = TableName.valueOf("hbase", metaTable);
+      Path metaTableDir = new Path(hbaseNamespaceDir, metaTable);
+      fs.mkdirs(metaTableDir);
+      fs.mkdirs(new Path(metaTableDir, FSTableDescriptors.TABLEINFO_DIR));
+      fs.mkdirs(new Path(metaTableDir, "abcdef0123456789"));
+
+      TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
+        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(HConstants.CATALOG_FAMILY)
+          .setMaxVersions(HConstants.DEFAULT_HBASE_META_VERSIONS).setInMemory(true)
+          .setBlocksize(HConstants.DEFAULT_HBASE_META_BLOCK_SIZE)
+          .setBloomFilterType(BloomType.ROWCOL).build())
+        .build();
+
+      Path tableDir = CommonFSUtils.getTableDir(testDir, tableName);
+      FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, tableDescriptor,
+        false);
+    }
+    FSTableDescriptors tableDescriptors = new FSTableDescriptors(fs, testDir);
+    Map<String, TableDescriptor> allTables = tableDescriptors.getAll();
+
+    assertFalse("Should not contain meta_replica1", allTables.containsKey("hbase:meta_replica1"));
+    assertTrue("Should include the local hbase:meta", allTables.containsKey("hbase:meta"));
+  }
+
private static class FSTableDescriptorsTest extends FSTableDescriptors {
public FSTableDescriptorsTest(FileSystem fs, Path rootdir) {