This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new f084940881b HBASE-28704 The expired snapshot can be read by CopyTable or ExportSnapshot (branch-2) (#6082)
f084940881b is described below

commit f084940881be3c3a41725ce5650506ce37ee2161
Author: lupeng <lupeng_n...@qq.com>
AuthorDate: Wed Jul 17 10:42:16 2024 +0800

    HBASE-28704 The expired snapshot can be read by CopyTable or ExportSnapshot (branch-2) (#6082)
    
    Signed-off-by: Duo Zhang <zhang...@apache.org>
    Signed-off-by: Liangjun He <heliang...@apache.org>
---
 .../hadoop/hbase/snapshot/ExportSnapshot.java      | 48 ++++++-----
 .../hadoop/hbase/mapreduce/TestCopyTable.java      | 42 ++++++++++
 .../hadoop/hbase/snapshot/TestExportSnapshot.java  | 96 +++++++++++++++++-----
 .../hbase/snapshot/RestoreSnapshotHelper.java      |  7 ++
 .../hbase/snapshot/TestRestoreSnapshotHelper.java  | 41 +++++++++
 5 files changed, 194 insertions(+), 40 deletions(-)
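
For readers skimming the patch: the core of the fix is the same guard, added in both ExportSnapshot.verifySnapshot() and RestoreSnapshotHelper.copySnapshotForScanner(), that refuses to read a snapshot whose TTL has elapsed. Below is a condensed sketch of that guard as a standalone helper; the method name failIfExpired is illustrative only, while the calls themselves are taken from the hunks that follow.

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
    import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
    import org.apache.hadoop.hbase.snapshot.SnapshotTTLExpiredException;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    // Sketch only: equivalent of the check the patch inserts before a snapshot is opened.
    static void failIfExpired(FileSystem fs, Path snapshotDir) throws IOException {
      // Read the on-disk snapshot descriptor.
      SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
      // Compare TTL + creation time against the current time.
      boolean isExpired = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
        snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
      if (isExpired) {
        // Fail fast instead of silently reading an expired snapshot.
        throw new SnapshotTTLExpiredException(ProtobufUtil.createSnapshotDesc(snapshotDesc));
      }
    }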

diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 8c39d7411f1..c8e27e101e3 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -79,6 +79,7 @@ import org.slf4j.LoggerFactory;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
 
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -137,9 +138,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
     static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,
       "Do not verify checksum, use name+length only.");
     static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,
-      "Do not verify the integrity of the exported snapshot.");
-    static final Option NO_SOURCE_VERIFY =
-      new Option(null, "no-source-verify", false, "Do not verify the source of the snapshot.");
+      "Do not verify the exported snapshot's expiration status and 
integrity.");
+    static final Option NO_SOURCE_VERIFY = new Option(null, 
"no-source-verify", false,
+      "Do not verify the source snapshot's expiration status and integrity.");
     static final Option OVERWRITE =
       new Option(null, "overwrite", false, "Rewrite the snapshot manifest if 
already exists.");
     static final Option CHUSER =
@@ -933,13 +934,17 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
     }
   }
 
-  private void verifySnapshot(final Configuration baseConf, final FileSystem fs, final Path rootDir,
-    final Path snapshotDir) throws IOException {
+  private void verifySnapshot(final SnapshotDescription snapshotDesc, final Configuration baseConf,
+    final FileSystem fs, final Path rootDir, final Path snapshotDir) throws IOException {
     // Update the conf with the current root dir, since may be a different cluster
     Configuration conf = new Configuration(baseConf);
     CommonFSUtils.setRootDir(conf, rootDir);
     CommonFSUtils.setFsDefault(conf, CommonFSUtils.getRootDir(conf));
-    SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
+    boolean isExpired = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
+      snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
+    if (isExpired) {
+      throw new SnapshotTTLExpiredException(ProtobufUtil.createSnapshotDesc(snapshotDesc));
+    }
     SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, snapshotDesc);
   }
 
@@ -1041,14 +1046,14 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
     if (snapshotName == null) {
       System.err.println("Snapshot name not provided.");
       LOG.error("Use -h or --help for usage instructions.");
-      return 0;
+      return EXIT_FAILURE;
     }
 
     if (outputRoot == null) {
       System.err
         .println("Destination file-system (--" + Options.COPY_TO.getLongOpt() 
+ ") not provided.");
       LOG.error("Use -h or --help for usage instructions.");
-      return 0;
+      return EXIT_FAILURE;
     }
 
     if (targetName == null) {
@@ -1076,11 +1081,14 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
     LOG.debug("outputFs={}, outputRoot={}, skipTmp={}, 
initialOutputSnapshotDir={}", outputFs,
       outputRoot.toString(), skipTmp, initialOutputSnapshotDir);
 
+    // throw CorruptedSnapshotException if we can't read the snapshot info.
+    SnapshotDescription sourceSnapshotDesc =
+      SnapshotDescriptionUtils.readSnapshotInfo(inputFs, snapshotDir);
+
     // Verify snapshot source before copying files
     if (verifySource) {
-      LOG.info("Verify snapshot source, inputFs={}, inputRoot={}, 
snapshotDir={}.",
-        inputFs.getUri(), inputRoot, snapshotDir);
-      verifySnapshot(srcConf, inputFs, inputRoot, snapshotDir);
+      LOG.info("Verify the source snapshot's expiration status and 
integrity.");
+      verifySnapshot(sourceSnapshotDesc, srcConf, inputFs, inputRoot, 
snapshotDir);
     }
 
     // Find the necessary directory which need to change owner and group
@@ -1101,12 +1109,12 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       if (overwrite) {
         if (!outputFs.delete(outputSnapshotDir, true)) {
           System.err.println("Unable to remove existing snapshot directory: " 
+ outputSnapshotDir);
-          return 1;
+          return EXIT_FAILURE;
         }
       } else {
         System.err.println("The snapshot '" + targetName + "' already exists 
in the destination: "
           + outputSnapshotDir);
-        return 1;
+        return EXIT_FAILURE;
       }
     }
 
@@ -1117,7 +1125,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
           if (!outputFs.delete(snapshotTmpDir, true)) {
             System.err
               .println("Unable to remove existing snapshot tmp directory: " + 
snapshotTmpDir);
-            return 1;
+            return EXIT_FAILURE;
           }
         } else {
           System.err
@@ -1126,7 +1134,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
             .println("Please check " + snapshotTmpDir + ". If the snapshot has completed, ");
           System.err
             .println("consider removing " + snapshotTmpDir + " by using the 
-overwrite option");
-          return 1;
+          return EXIT_FAILURE;
         }
       }
     }
@@ -1205,19 +1213,21 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
       // Step 4 - Verify snapshot integrity
       if (verifyTarget) {
-        LOG.info("Verify snapshot integrity");
-        verifySnapshot(destConf, outputFs, outputRoot, outputSnapshotDir);
+        LOG.info("Verify the exported snapshot's expiration status and 
integrity.");
+        SnapshotDescription targetSnapshotDesc =
+          SnapshotDescriptionUtils.readSnapshotInfo(outputFs, outputSnapshotDir);
+        verifySnapshot(targetSnapshotDesc, destConf, outputFs, outputRoot, outputSnapshotDir);
       }
 
       LOG.info("Export Completed: " + targetName);
-      return 0;
+      return EXIT_SUCCESS;
     } catch (Exception e) {
       LOG.error("Snapshot export failed", e);
       if (!skipTmp) {
         outputFs.delete(snapshotTmpDir, true);
       }
       outputFs.delete(outputSnapshotDir, true);
-      return 1;
+      return EXIT_FAILURE;
     }
   }
 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index da420cfe7a7..9992f28b70a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -19,23 +19,31 @@ package org.apache.hadoop.hbase.mapreduce;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.SnapshotDescription;
+import org.apache.hadoop.hbase.client.SnapshotType;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
+import org.apache.hadoop.hbase.snapshot.SnapshotTTLExpiredException;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -210,6 +218,40 @@ public class TestCopyTable extends CopyTableTestBase {
     testCopyTableBySnapshot("testLoadingSnapshotToTable", false, false);
   }
 
+  @Test
+  public void testLoadingTtlExpiredSnapshotToTable() throws Exception {
+    String tablePrefix = "testLoadingExpiredSnapshotToTable";
+    TableName table1 = TableName.valueOf(tablePrefix + 1);
+    TableName table2 = TableName.valueOf(tablePrefix + 2);
+    Table t1 = createTable(table1, FAMILY_A, false);
+    createTable(table2, FAMILY_A, false);
+    loadData(t1, FAMILY_A, Bytes.toBytes("qualifier"));
+    String snapshot = tablePrefix + "_snapshot";
+    Map<String, Object> properties = new HashMap<>();
+    properties.put("TTL", 10);
+    SnapshotDescription snapshotDescription = new SnapshotDescription(snapshot, table1,
+      SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
+    TEST_UTIL.getAdmin().snapshot(snapshotDescription);
+    boolean isExist =
+      TEST_UTIL.getAdmin().listSnapshots().stream().anyMatch(ele -> snapshot.equals(ele.getName()));
+    assertTrue(isExist);
+    int retry = 6;
+    while (
+      !SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+        snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
+    ) {
+      retry--;
+      Thread.sleep(10 * 1000);
+    }
+    boolean isExpiredSnapshot =
+      SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+        snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
+    assertTrue(isExpiredSnapshot);
+    String[] args = new String[] { "--snapshot", "--new.name=" + table2, "--bulkload", snapshot };
+    assertThrows(SnapshotTTLExpiredException.class,
+      () -> runCopy(TEST_UTIL.getConfiguration(), args));
+  }
+
   @Test
   public void tsetLoadingSnapshotToMobTable() throws Exception {
     testCopyTableBySnapshot("testLoadingSnapshotToMobTable", false, true);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 03e5d663405..cfb671a707b 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -44,12 +44,14 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotType;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -282,6 +284,39 @@ public class TestExportSnapshot {
     }
   }
 
+  @Test
+  public void testExportExpiredSnapshot() throws Exception {
+    String name = "testExportExpiredSnapshot";
+    TableName tableName = TableName.valueOf(name);
+    String snapshotName = "snapshot-" + name;
+    createTable(tableName);
+    SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
+    Map<String, Object> properties = new HashMap<>();
+    properties.put("TTL", 10);
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotDescription =
+      new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
+        SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
+    admin.snapshot(snapshotDescription);
+    boolean isExist =
+      admin.listSnapshots().stream().anyMatch(ele -> snapshotName.equals(ele.getName()));
+    assertTrue(isExist);
+    int retry = 6;
+    while (
+      !SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+        snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
+    ) {
+      retry--;
+      Thread.sleep(10 * 1000);
+    }
+    boolean isExpiredSnapshot =
+      SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
+        snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
+    assertTrue(isExpiredSnapshot);
+    int res = runExportSnapshot(TEST_UTIL.getConfiguration(), snapshotName, snapshotName,
+      TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false, false, true, true);
+    assertTrue(res == AbstractHBaseTool.EXIT_FAILURE);
+  }
+
   private void testExportFileSystemState(final TableName tableName, final byte[] snapshotName,
     final byte[] targetName, int filesExpected) throws Exception {
     testExportFileSystemState(tableName, snapshotName, targetName, filesExpected,
@@ -313,29 +348,10 @@ public class TestExportSnapshot {
     FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
     FileSystem srcFs = srcDir.getFileSystem(conf);
     Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
-    LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
-      rawTgtDir, srcFs.getUri(), srcDir);
-    List<String> opts = new ArrayList<>();
-    opts.add("--snapshot");
-    opts.add(Bytes.toString(snapshotName));
-    opts.add("--copy-to");
-    opts.add(tgtDir.toString());
-    if (targetName != snapshotName) {
-      opts.add("--target");
-      opts.add(Bytes.toString(targetName));
-    }
-    if (overwrite) {
-      opts.add("--overwrite");
-    }
-    if (resetTtl) {
-      opts.add("--reset-ttl");
-    }
-    if (!checksumVerify) {
-      opts.add("--no-checksum-verify");
-    }
 
     // Export Snapshot
-    int res = run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
+    int res = runExportSnapshot(conf, Bytes.toString(snapshotName), Bytes.toString(targetName),
+      srcDir, rawTgtDir, overwrite, resetTtl, checksumVerify, true, true);
     assertEquals("success " + success + ", res=" + res, success ? 0 : 1, res);
     if (!success) {
       final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, Bytes.toString(targetName));
@@ -468,4 +484,42 @@ public class TestExportSnapshot {
     FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
     fs.delete(path, true);
   }
+
+  private static int runExportSnapshot(final Configuration conf, final String sourceSnapshotName,
+    final String targetSnapshotName, final Path srcDir, Path rawTgtDir, final boolean overwrite,
+    final boolean resetTtl, final boolean checksumVerify, final boolean noSourceVerify,
+    final boolean noTargetVerify) throws Exception {
+    FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
+    FileSystem srcFs = srcDir.getFileSystem(conf);
+    Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
+    LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
+      rawTgtDir, srcFs.getUri(), srcDir);
+    List<String> opts = new ArrayList<>();
+    opts.add("--snapshot");
+    opts.add(sourceSnapshotName);
+    opts.add("--copy-to");
+    opts.add(tgtDir.toString());
+    if (!targetSnapshotName.equals(sourceSnapshotName)) {
+      opts.add("--target");
+      opts.add(targetSnapshotName);
+    }
+    if (overwrite) {
+      opts.add("--overwrite");
+    }
+    if (resetTtl) {
+      opts.add("--reset-ttl");
+    }
+    if (!checksumVerify) {
+      opts.add("--no-checksum-verify");
+    }
+    if (!noSourceVerify) {
+      opts.add("--no-source-verify");
+    }
+    if (!noTargetVerify) {
+      opts.add("--no-target-verify");
+    }
+
+    // Export Snapshot
+    return run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 8395456cd76..377cfda03a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.security.access.ShadedAccessControlUtil;
 import org.apache.hadoop.hbase.security.access.TablePermission;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
 import org.apache.hadoop.hbase.util.Pair;
@@ -877,6 +878,12 @@ public class RestoreSnapshotHelper {
 
     Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);
     SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
+    // check if the snapshot is expired.
+    boolean isExpired = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
+      snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
+    if (isExpired) {
+      throw new SnapshotTTLExpiredException(ProtobufUtil.createSnapshotDesc(snapshotDesc));
+    }
     SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);
 
     MonitoredTask status = TaskMonitor.get()
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
index 3a85193d1da..4c844fc92ea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.snapshot;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocatedFileStatus;
@@ -32,6 +35,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.SnapshotType;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@@ -45,6 +49,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.wal.WALSplitUtil;
 import org.junit.After;
@@ -177,6 +182,42 @@ public class TestRestoreSnapshotHelper {
     }
   }
 
+  @Test
+  public void testCopyExpiredSnapshotForScanner() throws IOException, InterruptedException {
+    rootDir = TEST_UTIL.getDefaultRootDirPath();
+    CommonFSUtils.setRootDir(conf, rootDir);
+    TableName tableName = TableName.valueOf("testCopyExpiredSnapshotForScanner");
+    String snapshotName = tableName.getNameAsString() + "-snapshot";
+    Path restoreDir = new Path("/hbase/.tmp-expired-snapshot/copySnapshotDest");
+    // create table and put some data into the table
+    byte[] columnFamily = Bytes.toBytes("A");
+    Table table = TEST_UTIL.createTable(tableName, columnFamily);
+    TEST_UTIL.loadTable(table, columnFamily);
+    // create snapshot with ttl = 10 sec
+    Map<String, Object> properties = new HashMap<>();
+    properties.put("TTL", 10);
+    org.apache.hadoop.hbase.client.SnapshotDescription snapshotDesc =
+      new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
+        SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
+    TEST_UTIL.getAdmin().snapshot(snapshotDesc);
+    boolean isExist = TEST_UTIL.getAdmin().listSnapshots().stream()
+      .anyMatch(ele -> snapshotName.equals(ele.getName()));
+    assertTrue(isExist);
+    int retry = 6;
+    while (
+      !SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
+        snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
+    ) {
+      retry--;
+      Thread.sleep(10 * 1000);
+    }
+    boolean isExpiredSnapshot = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
+      snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
+    assertTrue(isExpiredSnapshot);
+    assertThrows(SnapshotTTLExpiredException.class, () -> RestoreSnapshotHelper
+      .copySnapshotForScanner(conf, fs, rootDir, restoreDir, snapshotName));
+  }
+
   protected void createTableAndSnapshot(TableName tableName, String snapshotName)
     throws IOException {
     byte[] column = Bytes.toBytes("A");
