This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-3 by this push:
     new 62bfbd51bb7 HBASE-28082 oldWALs naming can be incompatible with HBase backup (#5445)
62bfbd51bb7 is described below

commit 62bfbd51bb7e7efb2d008928e9acd717efa9da4d
Author: Jan Van Besien NGDATA <93314377+janvanbesien-ngd...@users.noreply.github.com>
AuthorDate: Sat Oct 7 03:21:40 2023 +0200

    HBASE-28082 oldWALs naming can be incompatible with HBase backup (#5445)
    
    Make the hostname parsing in BackupUtils#parseHostFromOldLog more lenient
    by not making any assumptions about the name of the file other than that
    it starts with an org.apache.hadoop.hbase.ServerName.
    
    Signed-off-by: Duo Zhang <zhang...@apache.org>
    (cherry picked from commit 9262cbc1664edd92d431c27ca5b43c4bac7473c1)
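
For context, here is a minimal standalone sketch of the lenient parsing described above, using only the JDK (the patch itself uses the shaded Guava Splitter/Iterables inside BackupUtils#parseHostFromOldLog). The class name, port and sample file names below are invented for illustration:

import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;

public class OldWalNameParsingSketch {
  // URL-decode the file name and split on "," (the ServerName field separator),
  // then keep only host and port; nothing is assumed about the remaining segments.
  static String parseHostPort(String oldWalFileName) throws Exception {
    String decoded = URLDecoder.decode(oldWalFileName, StandardCharsets.UTF_8.name());
    String[] parts = decoded.split(",");
    return parts[0] + ":" + parts[1];
  }

  public static void main(String[] args) throws Exception {
    // Typical oldWAL name: a ServerName (commas encoded as %2C) followed by a timestamp.
    System.out.println(parseHostPort("localhost%2C60030%2C1234.1696641700000"));
    // A name with extra segments, as written by a region-grouping WAL provider,
    // still yields host:port because only the first two fields are used.
    System.out.println(parseHostPort(
      "localhost%2C60030%2C1234.localhost%2C60030%2C1234.regiongroup-0.1696641700000"));
  }
}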
---
 .../hadoop/hbase/backup/util/BackupUtils.java      | 10 ++++---
 .../hadoop/hbase/backup/TestBackupUtils.java       | 33 ++++++++++++++--------
 2 files changed, 28 insertions(+), 15 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
index d4e849f610a..d0a04960779 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
@@ -65,6 +65,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;
 
 /**
@@ -366,10 +367,11 @@ public final class BackupUtils {
       return null;
     }
     try {
-      String n = p.getName();
-      int idx = n.lastIndexOf(LOGNAME_SEPARATOR);
-      String s = URLDecoder.decode(n.substring(0, idx), "UTF8");
-      return ServerName.valueOf(s).getAddress().toString();
+      String urlDecodedName = URLDecoder.decode(p.getName(), "UTF8");
+      Iterable<String> nameSplitsOnComma = Splitter.on(",").split(urlDecodedName);
+      String host = Iterables.get(nameSplitsOnComma, 0);
+      String port = Iterables.get(nameSplitsOnComma, 1);
+      return host + ":" + port;
     } catch (Exception e) {
       LOG.warn("Skip log file (can't parse): {}", p);
       return null;
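
For illustration only (these file names are invented, not taken from the patch), the rewritten logic above maps oldWAL names to host:port as follows, whereas the previous substring + ServerName.valueOf approach could land in the catch block ("Skip log file (can't parse)") for names carrying extra segments:

  localhost%2C60030%2C1234.1696641700000
      -> "localhost:60030"
  localhost%2C60030%2C1234.localhost%2C60030%2C1234.regiongroup-0.1696641700000
      -> "localhost:60030"
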
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
index a55720777c4..3fc2c31a9d5 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
@@ -87,21 +87,32 @@ public class TestBackupUtils {
 
   @Test
   public void testFilesystemWalHostNameParsing() throws IOException {
-    String host = "localhost";
-    int port = 60030;
-    ServerName serverName = ServerName.valueOf(host, port, 1234);
+    String[] hosts =
+      new String[] { "10.20.30.40", "127.0.0.1", "localhost", "a-region-server.domain.com" };
+
     Path walRootDir = CommonFSUtils.getWALRootDir(conf);
     Path oldLogDir = new Path(walRootDir, HConstants.HREGION_OLDLOGDIR_NAME);
 
-    Path testWalPath = new Path(oldLogDir,
-      serverName.toString() + BackupUtils.LOGNAME_SEPARATOR + EnvironmentEdgeManager.currentTime());
-    Path testMasterWalPath =
-      new Path(oldLogDir, testWalPath.getName() + MasterRegionFactory.ARCHIVED_WAL_SUFFIX);
+    int port = 60030;
+    for (String host : hosts) {
+      ServerName serverName = ServerName.valueOf(host, port, 1234);
+
+      Path testOldWalPath = new Path(oldLogDir,
+        serverName + BackupUtils.LOGNAME_SEPARATOR + EnvironmentEdgeManager.currentTime());
+      Assert.assertEquals(host + Addressing.HOSTNAME_PORT_SEPARATOR + port,
+        BackupUtils.parseHostFromOldLog(testOldWalPath));
+
+      Path testMasterWalPath =
+        new Path(oldLogDir, testOldWalPath.getName() + MasterRegionFactory.ARCHIVED_WAL_SUFFIX);
+      Assert.assertNull(BackupUtils.parseHostFromOldLog(testMasterWalPath));
 
-    String parsedHost = BackupUtils.parseHostFromOldLog(testMasterWalPath);
-    Assert.assertNull(parsedHost);
+      // org.apache.hadoop.hbase.wal.BoundedGroupingStrategy does this
+      Path testOldWalWithRegionGroupingPath = new Path(oldLogDir,
+        serverName + BackupUtils.LOGNAME_SEPARATOR + serverName + BackupUtils.LOGNAME_SEPARATOR
+          + "regiongroup-0" + BackupUtils.LOGNAME_SEPARATOR + EnvironmentEdgeManager.currentTime());
+      Assert.assertEquals(host + Addressing.HOSTNAME_PORT_SEPARATOR + port,
+        BackupUtils.parseHostFromOldLog(testOldWalWithRegionGroupingPath));
+    }
 
-    parsedHost = BackupUtils.parseHostFromOldLog(testWalPath);
-    Assert.assertEquals(parsedHost, host + Addressing.HOSTNAME_PORT_SEPARATOR + port);
   }
 }
