This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-operator-tools.git


The following commit(s) were added to refs/heads/master by this push:
     new 70f35da  HBASE-22712 [HBCK2] Remove ClusterConnection dependency in hbck2 (#149)
70f35da is described below

commit 70f35da408e84aa1f3831b0cc3a230081f721cd3
Author: Duo Zhang <[email protected]>
AuthorDate: Thu Sep 4 09:38:24 2025 +0800

    HBASE-22712 [HBCK2] Remove ClusterConnection dependency in hbck2 (#149)
    
    Signed-off-by: Nihal Jain <[email protected]>
---
 .../src/main/java/org/apache/hbase/HBCK2.java      | 42 +++++++++++-----------
 .../src/test/java/org/apache/hbase/TestHBCK2.java  | 27 +++++++-------
 .../java/org/apache/hbase/TestRecoverUnknown.java  |  4 +--
 .../org/apache/hbase/TestSchedulingRecoveries.java |  4 +--
 4 files changed, 38 insertions(+), 39 deletions(-)

diff --git a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
index c569d27..67ca904 100644
--- a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
+++ b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
@@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Hbck;
 import org.apache.hadoop.hbase.client.Put;
@@ -151,7 +151,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
    * Check for HBCK support. Expects created connection.
    * @param supportedVersions list of zero or more supported versions.
    */
-  void checkHBCKSupport(ClusterConnection connection, String cmd, String... supportedVersions)
+  void checkHBCKSupport(Connection connection, String cmd, String... supportedVersions)
     throws IOException {
     if (skipCheck) {
       LOG.info("Skipped {} command version check; 'skip' set", cmd);
@@ -172,7 +172,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     }
   }
 
-  void checkFunctionSupported(ClusterConnection connection, String cmd) throws IOException {
+  void checkFunctionSupported(Connection connection, String cmd) throws IOException {
     if (skipCheck) {
       LOG.info("Skipped {} command version check; 'skip' set", cmd);
       return;
@@ -234,12 +234,12 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return hbck.setTableStateInMeta(new TableState(tableName, state));
   }
 
-  int setRegionState(ClusterConnection connection, String region, RegionState.State newState)
+  int setRegionState(Connection connection, String region, RegionState.State newState)
     throws IOException {
     return setRegionState(connection, region, 0, newState);
   }
 
-  int setRegionState(ClusterConnection connection, String[] args) throws IOException {
+  int setRegionState(Connection connection, String[] args) throws IOException {
     Options options = new Options();
     Option inputFile = Option.builder("i").longOpt("inputFiles").build();
     options.addOption(inputFile);
@@ -267,7 +267,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     }
   }
 
-  int setRegionStateByArgs(ClusterConnection connection, String[] args) throws IOException {
+  int setRegionStateByArgs(Connection connection, String[] args) throws IOException {
     if (args == null || args.length < 3) {
       return EXIT_FAILURE;
     }
@@ -276,7 +276,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return setRegionState(connection, args[0], replicaId, state);
   }
 
-  int setRegionState(ClusterConnection connection, String region, int replicaId,
+  int setRegionState(Connection connection, String region, int replicaId,
     RegionState.State newState) throws IOException {
     if (newState == null) {
       throw new IllegalArgumentException("State can't be null.");
@@ -571,7 +571,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     List<Long> pids = Arrays.stream(pidStrs).map(Long::valueOf).collect(Collectors.toList());
 
     // Process here
-    try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
       checkFunctionSupported(connection, BYPASS);
       if (batchSize == NO_BATCH_SIZE) {
         return hbck.bypassProcedure(pids, lockWait, overrideFlag, recursiveFlag);
@@ -615,7 +615,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     CommandLineParser parser = new DefaultParser();
     CommandLine commandLine = parser.parse(options, args, false);
     final boolean fix = commandLine.hasOption(dryRunOption.getOpt());
-    try (ClusterConnection connection = connect()) {
+    try (Connection connection = connect()) {
       new RegionInfoMismatchTool(connection).run(fix);
     }
   }
@@ -1140,8 +1140,8 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
    * Create connection. Needs to be called before we go against remote server. Be sure to close when
    * done.
    */
-  ClusterConnection connect() throws IOException {
-    return (ClusterConnection) ConnectionFactory.createConnection(getConf());
+  Connection connect() throws IOException {
+    return ConnectionFactory.createConnection(getConf());
   }
 
   /**
@@ -1166,7 +1166,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
             + " takes tablename and state arguments: e.g. user ENABLED, or a list of input files");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           setTableState(hbck, purgeFirst(commands));
         }
@@ -1177,7 +1177,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " takes one or more encoded region names");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           System.out.println(assigns(hbck, purgeFirst(commands)));
         }
@@ -1206,7 +1206,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " takes one or more encoded region names");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           System.out.println(toString(unassigns(hbck, purgeFirst(commands))));
         }
@@ -1219,13 +1219,13 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
 
-        try (ClusterConnection connection = connect()) {
+        try (Connection connection = connect()) {
           checkHBCKSupport(connection, command);
           return setRegionState(connection, purgeFirst(commands));
         }
 
       case FILESYSTEM:
-        try (ClusterConnection connection = connect()) {
+        try (Connection connection = connect()) {
           checkHBCKSupport(connection, command);
           try (FileSystemFsck fsfsck = new FileSystemFsck(getConf())) {
             Pair<CommandLine, List<String>> pair =
@@ -1237,7 +1237,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
         }
 
       case REPLICATION:
-        try (ClusterConnection connection = connect()) {
+        try (Connection connection = connect()) {
           checkHBCKSupport(connection, command, "2.1.1", "2.2.0", "3.0.0");
           try (ReplicationFsck replicationFsck = new ReplicationFsck(getConf())) {
             Pair<CommandLine, List<String>> pair =
@@ -1253,7 +1253,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " takes one or more serverNames");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           System.out.println(toString(scheduleRecoveries(hbck, purgeFirst(commands))));
         }
@@ -1264,7 +1264,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " doesn't take any arguments");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           System.out.println(toString(recoverUnknown(hbck)));
         }
@@ -1275,7 +1275,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " doesn't take any arguments");
           return EXIT_FAILURE;
         }
-        try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
+        try (Connection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
           hbck.fixMeta();
           System.out.println("Server-side processing of fixMeta triggered.");
@@ -1320,7 +1320,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
         List<String> tableNames = Arrays.asList(purgeFirst(commands));
         MissingTableDescriptorGenerator tableInfoGenerator =
           new MissingTableDescriptorGenerator(getConf());
-        try (ClusterConnection connection = connect()) {
+        try (Connection connection = connect()) {
           tableInfoGenerator.generateTableDescriptorFileIfMissing(connection.getAdmin(),
             tableNames);
         } catch (IOException e) {
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
index cce6749..50c97f0 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Hbck;
@@ -116,14 +115,14 @@ public class TestHBCK2 {
 
   @Test(expected = UnsupportedOperationException.class)
   public void testVersions() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect()) {
+    try (Connection connection = this.hbck2.connect()) {
       this.hbck2.checkHBCKSupport(connection, "test", "10.0.0");
     }
   }
 
   @Test
   public void testSetTableStateInMeta() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       TableState state = this.hbck2.setTableState(hbck, TABLE_NAME, TableState.State.DISABLED);
       assertTrue("Found=" + state.getState(), state.isEnabled());
       // Restore the state.
@@ -145,7 +144,7 @@ public class TestHBCK2 {
     assertTrue(result.contains("tableName=TestHBCK2, state=ENABLED"));
 
     // Restore the state.
-    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       TableState state = this.hbck2.setTableState(hbck, TABLE_NAME, TableState.State.ENABLED);
       assertTrue("Found=" + state.getState(), state.isDisabled());
     }
@@ -181,7 +180,7 @@ public class TestHBCK2 {
       String[] regionStrsArray =
         regions.stream().map(RegionInfo::getEncodedName).toArray(String[]::new);
 
-      try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+      try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
         unassigns(regions, regionStrsArray);
         List<Long> pids = this.hbck2.assigns(hbck, regionStrsArray);
         validateRegionEndState(pids, regions, true);
@@ -224,7 +223,7 @@ public class TestHBCK2 {
       RegionInfo info = regions.get(0);
       assertEquals(RegionState.State.OPEN, getCurrentRegionState(info));
       String region = info.getEncodedName();
-      try (ClusterConnection connection = this.hbck2.connect()) {
+      try (Connection connection = this.hbck2.connect()) {
         this.hbck2.setRegionState(connection, region, RegionState.State.CLOSING);
       }
       assertEquals(RegionState.State.CLOSING, getCurrentRegionState(info));
@@ -242,7 +241,7 @@ public class TestHBCK2 {
       assertEquals(RegionState.State.OPEN, getCurrentRegionState(info));
       String region = info.getEncodedName();
       String[] args = new String[] { region, "0", "CLOSING" };
-      try (ClusterConnection connection = this.hbck2.connect()) {
+      try (Connection connection = this.hbck2.connect()) {
         this.hbck2.setRegionStateByArgs(connection, args);
       }
       assertEquals(RegionState.State.CLOSING, getCurrentRegionState(info));
@@ -290,7 +289,7 @@ public class TestHBCK2 {
       int replicaId = regions.get(1).getReplicaId();
       assertEquals(RegionState.State.OPEN, getCurrentRegionState(regions.get(0), replicaId));
       String primaryRegion = primaryRegionInfo.getEncodedName();
-      try (ClusterConnection connection = this.hbck2.connect()) {
+      try (Connection connection = this.hbck2.connect()) {
         this.hbck2.setRegionState(connection, primaryRegion, regions.get(1).getReplicaId(),
           RegionState.State.CLOSING);
       }
@@ -302,7 +301,7 @@ public class TestHBCK2 {
 
   @Test
   public void testSetRegionStateInvalidRegion() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect()) {
+    try (Connection connection = this.hbck2.connect()) {
       assertEquals(HBCK2.EXIT_FAILURE,
         this.hbck2.setRegionState(connection, "NO_REGION", RegionState.State.CLOSING));
     }
@@ -316,7 +315,7 @@ public class TestHBCK2 {
       RegionInfo info = regions.get(0);
       assertEquals(RegionState.State.OPEN, getCurrentRegionState(info));
       String region = info.getEncodedName();
-      try (ClusterConnection connection = this.hbck2.connect()) {
+      try (Connection connection = this.hbck2.connect()) {
         this.hbck2.setRegionState(connection, region, null);
       }
     } finally {
@@ -409,7 +408,7 @@ public class TestHBCK2 {
   }
 
   private void unassigns(List<RegionInfo> regions, String[] regionStrsArray) throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       List<Long> pids = this.hbck2.unassigns(hbck, regionStrsArray);
       waitOnPids(pids);
     }
@@ -503,7 +502,7 @@ public class TestHBCK2 {
 
   @Test(expected = IllegalArgumentException.class)
   public void testSetRegionStateInvalidRegionAndInvalidState() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect()) {
+    try (Connection connection = this.hbck2.connect()) {
       this.hbck2.setRegionState(connection, "NO_REGION", null);
     }
   }
@@ -682,14 +681,14 @@ public class TestHBCK2 {
 
   @Test
   public void testFunctionSupported() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect()) {
+    try (Connection connection = this.hbck2.connect()) {
       this.hbck2.checkFunctionSupported(connection, "scheduleRecoveries");
     }
   }
 
   @Test(expected = UnsupportedOperationException.class)
   public void testFunctionNotSupported() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect()) {
+    try (Connection connection = this.hbck2.connect()) {
       this.hbck2.checkFunctionSupported(connection, "test");
     }
   }
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestRecoverUnknown.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestRecoverUnknown.java
index 4238781..d71fda5 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestRecoverUnknown.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestRecoverUnknown.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.IOException;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Hbck;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -55,7 +55,7 @@ public class TestRecoverUnknown {
 
   @Test
   public void testKnownServersNotRecovered() throws IOException {
-    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       List<Long> pids = this.hbck2.recoverUnknown(hbck);
       assertEquals(0, pids.size());
     }
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestSchedulingRecoveries.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestSchedulingRecoveries.java
index 5220cba..a276b5f 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestSchedulingRecoveries.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestSchedulingRecoveries.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Hbck;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -53,7 +53,7 @@ public class TestSchedulingRecoveries {
   public void testSchedulingSCPWithTwoGoodHosts() throws IOException {
     String sn1 = TEST_UTIL.getHBaseCluster().getRegionServer(0).toString();
     String sn2 = TEST_UTIL.getHBaseCluster().getRegionServer(1).toString();
-    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+    try (Connection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       List<Long> pids = this.hbck2.scheduleRecoveries(hbck, new String[] { sn1, sn2 });
       assertEquals(2, pids.size());
       assertTrue(pids.get(0) > 0);
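
The net effect of this commit is that HBCK2 now works entirely against the public org.apache.hadoop.hbase.client.Connection interface: connect() returns a plain Connection from ConnectionFactory, and the Hbck handle comes from Connection.getHbck() instead of a cast to the internal ClusterConnection. A minimal standalone sketch of that usage pattern follows; the class name and the encoded region name in it are hypothetical illustrations, not part of this change.

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Hbck;

    public class HbckConnectionSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Only the public Connection interface is needed; no cast to the
        // internal ClusterConnection interface that this commit removes.
        try (Connection connection = ConnectionFactory.createConnection(conf);
            Hbck hbck = connection.getHbck()) {
          // Hypothetical encoded region name, purely for illustration.
          List<Long> pids = hbck.assigns(Arrays.asList("1588230740"));
          System.out.println("Scheduled assign procedures: " + pids);
        }
      }
    }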
