This is an automated email from the ASF dual-hosted git repository.

bbeaudreault pushed a commit to branch branch-2.6
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-2.6 by this push:
     new e8d24fab043 HBASE-28460 Full backup restore failed on empty HFiles (#5782)
e8d24fab043 is described below

commit e8d24fab043160f6dd617beb8be5ddb997776aa8
Author: DieterDP <90392398+dieterdp...@users.noreply.github.com>
AuthorDate: Tue Apr 2 14:38:48 2024 +0200

    HBASE-28460 Full backup restore failed on empty HFiles (#5782)

    Signed-off-by: Bryan Beaudreault <bbeaudrea...@apache.org>
---
 .../backup/mapreduce/MapReduceRestoreJob.java      |  5 +--
 .../hadoop/hbase/backup/util/RestoreTool.java      | 14 ++++++--
 .../hadoop/hbase/backup/TestFullRestore.java       | 38 ++++++++++++++++++++++
 3 files changed, 50 insertions(+), 7 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
index 5d654c0d85b..7a2fce4c418 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
@@ -87,10 +87,7 @@ public class MapReduceRestoreJob implements RestoreJob {
           LOG.debug("Restoring HFiles from directory " + bulkOutputPath);
         }
 
-        if (loader.bulkLoad(newTableNames[i], bulkOutputPath).isEmpty()) {
-          throw new IOException("Can not restore from backup directory " + dirs
-            + " (check Hadoop and HBase logs). Bulk loader returns null");
-        }
+        loader.bulkLoad(newTableNames[i], bulkOutputPath);
       } else {
         throw new IOException("Can not restore from backup directory " + dirs
           + " (check Hadoop/MR and HBase logs). Player return code =" + result);
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
index 8ca80d1301f..ff4e2672f7a 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/RestoreTool.java
@@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot
  */
 @InterfaceAudience.Private
 public class RestoreTool {
-  public static final Logger LOG = LoggerFactory.getLogger(BackupUtils.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RestoreTool.class);
 
   private final static long TABLE_AVAILABILITY_WAIT_TIME = 180000;
   private final String[] ignoreDirs = { HConstants.RECOVERED_EDITS_DIR };
@@ -437,6 +437,10 @@ public class RestoreTool {
         HFile.Reader reader = HFile.createReader(fs, hfile, conf);
         final byte[] first, last;
         try {
+          if (reader.getEntries() == 0) {
+            LOG.debug("Skipping hfile with 0 entries: " + hfile);
+            continue;
+          }
           first = reader.getFirstRowKey().get();
           last = reader.getLastRowKey().get();
           LOG.debug("Trying to figure out region boundaries hfile=" + hfile + " first="
@@ -491,8 +495,12 @@ public class RestoreTool {
           admin.createTable(htd);
         } else {
           keys = generateBoundaryKeys(regionDirList);
-          // create table using table descriptor and region boundaries
-          admin.createTable(htd, keys);
+          if (keys.length > 0) {
+            // create table using table descriptor and region boundaries
+            admin.createTable(htd, keys);
+          } else {
+            admin.createTable(htd);
+          }
         }
       } catch (NamespaceNotFoundException e) {
         LOG.warn("There was no namespace and the same will be created");
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java
index 385a6b3c519..d16d7af7501 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.ClassRule;
@@ -71,6 +72,43 @@ public class TestFullRestore extends TestBackupBase {
     hba.close();
   }
 
+  @Test
+  public void testFullRestoreSingleWithRegion() throws Exception {
+    LOG.info("test full restore on a single table empty table that has a region");
+
+    // This test creates its own table so other tests are not affected (we adjust it in this test)
+    TableName tableName = TableName.valueOf("table-full-restore-single-region");
+    TEST_UTIL.createTable(tableName, famName);
+
+    Admin admin = TEST_UTIL.getAdmin();
+
+    // Add & remove data to ensure a region is active, but functionally empty
+    Table table = TEST_UTIL.getConnection().getTable(tableName);
+    loadTable(table);
+    admin.flush(tableName);
+    TEST_UTIL.deleteTableData(tableName);
+    admin.flush(tableName);
+
+    TEST_UTIL.compact(tableName, true);
+
+    List<TableName> tables = Lists.newArrayList(tableName);
+    String backupId = fullTableBackup(tables);
+    assertTrue(checkSucceeded(backupId));
+
+    LOG.info("backup complete");
+
+    TEST_UTIL.deleteTable(tableName);
+
+    TableName[] tableset = new TableName[] { tableName };
+    TableName[] tablemap = new TableName[] { tableName };
+    BackupAdmin client = getBackupAdmin();
+    client.restore(BackupUtils.createRestoreRequest(BACKUP_ROOT_DIR, backupId, false, tableset,
+      tablemap, false));
+    assertTrue(admin.tableExists(tableName));
+    TEST_UTIL.deleteTable(tableName);
+    admin.close();
+  }
+
   @Test
   public void testFullRestoreSingleCommand() throws Exception {
     LOG.info("test full restore on a single table empty table: command-line");
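For context on the first hunk: BulkLoadHFiles.bulkLoad() returns an empty map when the bulk-output directory holds no HFiles to load, which is exactly what a backup of an empty table produces, so treating an empty result as a failure caused the restore to abort. The following is a minimal, hypothetical sketch (not part of this commit; the class and method names are illustrative only, and it assumes a Configuration, TableName and bulk-output Path are already available) of tolerating an empty bulk-load result:

import java.nio.ByteBuffer;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles.LoadQueueItem;

public class EmptyBulkLoadSketch {
  // Hypothetical helper, not part of this commit: bulk loads whatever HFiles exist
  // under bulkOutputPath and treats an empty result as a valid (empty-table) restore.
  static void loadIfAny(Configuration conf, TableName table, Path bulkOutputPath)
    throws Exception {
    BulkLoadHFiles loader = BulkLoadHFiles.create(conf);
    Map<LoadQueueItem, ByteBuffer> loaded = loader.bulkLoad(table, bulkOutputPath);
    if (loaded.isEmpty()) {
      // Before HBASE-28460 the restore job threw an IOException in this case; a backup
      // of an empty table yields no HFiles, so nothing being loaded is not an error.
      System.out.println("No HFiles bulk loaded for " + table);
    }
  }
}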