This is an automated email from the ASF dual-hosted git repository.
apurtell pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/branch-3 by this push:
new 7434c06a36b HBASE-28969 Move HFileLink file creations to SFT (#6459)
7434c06a36b is described below
commit 7434c06a36bcac1342cfa394141acbd2127b76a2
Author: gvprathyusha6 <[email protected]>
AuthorDate: Tue Jan 14 02:48:29 2025 +0530
HBASE-28969 Move HFileLink file creations to SFT (#6459)
Signed-off-by: Andrew Purtell <[email protected]>
Co-authored-by: Prathyusha Garre <[email protected]>
---
.../java/org/apache/hadoop/hbase/io/HFileLink.java | 170 ---------------------
.../assignment/SplitTableRegionProcedure.java | 53 ++++---
.../hbase/regionserver/HRegionFileSystem.java | 3 +-
.../storefiletracker/StoreFileTracker.java | 27 ++++
.../storefiletracker/StoreFileTrackerBase.java | 54 +++++++
.../hbase/snapshot/RestoreSnapshotHelper.java | 15 +-
.../apache/hadoop/hbase/util/RegionSplitter.java | 6 +-
.../apache/hadoop/hbase/io/hfile/TestPrefetch.java | 9 +-
.../hbase/master/cleaner/TestHFileLinkCleaner.java | 20 ++-
.../hbase/master/snapshot/TestSnapshotManager.java | 16 +-
.../hadoop/hbase/regionserver/TestHStoreFile.java | 7 +-
11 files changed, 167 insertions(+), 213 deletions(-)
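In short, the patch retires the static HFileLink.create(...) entry points and routes link-file creation through the StoreFileTracker (SFT) bound to the destination store, which is what the subject line "Move HFileLink file creations to SFT" refers to. A minimal before/after sketch in Java, assuming conf, fs, familyDir, familyName, regionFS, regionInfo, hfileName and createBackRef are already in scope (names are illustrative, taken from the hunks below):

  // Before this patch: callers wrote the link file directly via the static helper.
  // HFileLink.create(conf, fs, familyDir, regionInfo, hfileName, createBackRef);

  // After this patch: build (or reuse) the tracker for the destination store, then
  // create the link through it.
  StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, true,
    StoreContext.getBuilder()
      .withFamilyStoreDirectoryPath(familyDir)
      .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName))
      .withRegionFileSystem(regionFS).build());
  tracker.createHFileLink(regionInfo.getTable(), regionInfo.getEncodedName(), hfileName,
    createBackRef);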
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
index dc7ac7338ac..bd5fac1c3c4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -290,175 +289,6 @@ public class HFileLink extends FileLink {
return s;
}
- /**
- * Create a new HFileLink
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param hfileRegionInfo - Linked HFile Region Info
- * @param hfileName - Linked HFile name
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure.
- */
- public static String create(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final RegionInfo hfileRegionInfo, final String hfileName)
- throws IOException {
- return create(conf, fs, dstFamilyPath, hfileRegionInfo, hfileName, true);
- }
-
- /**
- * Create a new HFileLink
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param hfileRegionInfo - Linked HFile Region Info
- * @param hfileName - Linked HFile name
- * @param createBackRef - Whether back reference should be created. Defaults to true.
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure.
- */
- public static String create(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final RegionInfo hfileRegionInfo, final String hfileName,
- final boolean createBackRef) throws IOException {
- TableName linkedTable = hfileRegionInfo.getTable();
- String linkedRegion = hfileRegionInfo.getEncodedName();
- return create(conf, fs, dstFamilyPath, linkedTable, linkedRegion, hfileName, createBackRef);
- }
-
- /**
- * Create a new HFileLink
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param linkedTable - Linked Table Name
- * @param linkedRegion - Linked Region Name
- * @param hfileName - Linked HFile name
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure.
- */
- public static String create(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final TableName linkedTable, final String linkedRegion,
- final String hfileName) throws IOException {
- return create(conf, fs, dstFamilyPath, linkedTable, linkedRegion, hfileName, true);
- }
-
- /**
- * Create a new HFileLink. In the event of link creation failure, this method throws an
- * IOException, so that the calling upper laying can decide on how to proceed with this.
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param linkedTable - Linked Table Name
- * @param linkedRegion - Linked Region Name
- * @param hfileName - Linked HFile name
- * @param createBackRef - Whether back reference should be created. Defaults to true.
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure.
- */
- public static String create(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final TableName linkedTable, final String linkedRegion,
- final String hfileName, final boolean createBackRef) throws IOException {
- String familyName = dstFamilyPath.getName();
- String regionName = dstFamilyPath.getParent().getName();
- String tableName =
- CommonFSUtils.getTableName(dstFamilyPath.getParent().getParent()).getNameAsString();
-
- return create(conf, fs, dstFamilyPath, familyName, tableName, regionName, linkedTable,
- linkedRegion, hfileName, createBackRef);
- }
-
- /**
- * Create a new HFileLink
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param dstTableName - Destination table name
- * @param dstRegionName - Destination region name
- * @param linkedTable - Linked Table Name
- * @param linkedRegion - Linked Region Name
- * @param hfileName - Linked HFile name
- * @param createBackRef - Whether back reference should be created. Defaults to true.
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure
- */
- public static String create(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final String familyName, final String dstTableName,
- final String dstRegionName, final TableName linkedTable, final String linkedRegion,
- final String hfileName, final boolean createBackRef) throws IOException {
- String name = createHFileLinkName(linkedTable, linkedRegion, hfileName);
- String refName = createBackReferenceName(dstTableName, dstRegionName);
-
- // Make sure the destination directory exists
- fs.mkdirs(dstFamilyPath);
-
- // Make sure the FileLink reference directory exists
- Path archiveStoreDir =
- HFileArchiveUtil.getStoreArchivePath(conf, linkedTable, linkedRegion, familyName);
- Path backRefPath = null;
- if (createBackRef) {
- Path backRefssDir = getBackReferencesDir(archiveStoreDir, hfileName);
- fs.mkdirs(backRefssDir);
-
- // Create the reference for the link
- backRefPath = new Path(backRefssDir, refName);
- fs.createNewFile(backRefPath);
- }
- try {
- // Create the link
- if (fs.createNewFile(new Path(dstFamilyPath, name))) {
- return name;
- }
- } catch (IOException e) {
- LOG.error("couldn't create the link=" + name + " for " + dstFamilyPath, e);
- // Revert the reference if the link creation failed
- if (createBackRef) {
- fs.delete(backRefPath, false);
- }
- throw e;
- }
- throw new IOException(
- "File link=" + name + " already exists under " + dstFamilyPath + " folder.");
- }
-
- /**
- * Create a new HFileLink starting from a hfileLink name
- * <p>
- * It also adds a back-reference to the hfile back-reference directory to simplify the
- * reference-count and the cleaning process.
- * @param conf {@link Configuration} to read for the archive directory name
- * @param fs {@link FileSystem} on which to write the HFileLink
- * @param dstFamilyPath - Destination path (table/region/cf/)
- * @param hfileLinkName - HFileLink name (it contains hfile-region-table)
- * @param createBackRef - Whether back reference should be created. Defaults to true.
- * @return the file link name.
- * @throws IOException on file or parent directory creation failure.
- */
- public static String createFromHFileLink(final Configuration conf, final FileSystem fs,
- final Path dstFamilyPath, final String hfileLinkName, final boolean createBackRef)
- throws IOException {
- Matcher m = LINK_NAME_PATTERN.matcher(hfileLinkName);
- if (!m.matches()) {
- throw new IllegalArgumentException(hfileLinkName + " is not a valid HFileLink name!");
- }
- return create(conf, fs, dstFamilyPath, TableName.valueOf(m.group(1), m.group(2)), m.group(3),
- m.group(4), createBackRef);
- }
-
/**
* Create the back reference name
*/
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
index cf312654b2c..1dc6768cb05 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
@@ -685,9 +685,8 @@ public class SplitTableRegionProcedure
// table dir. In case of failure, the proc would go through this again, already existing
// region dirs and split files would just be ignored, new split files should get created.
int nbFiles = 0;
- final Map<String, Pair<Collection<StoreFileInfo>, StoreFileTracker>> files =
- new HashMap<String, Pair<Collection<StoreFileInfo>, StoreFileTracker>>(
- htd.getColumnFamilyCount());
+ final Map<String, Collection<StoreFileInfo>> files =
+ new HashMap<String, Collection<StoreFileInfo>>(htd.getColumnFamilyCount());
for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) {
String family = cfd.getNameAsString();
StoreFileTracker tracker =
@@ -710,7 +709,7 @@ public class SplitTableRegionProcedure
}
if (filteredSfis == null) {
filteredSfis = new ArrayList<StoreFileInfo>(sfis.size());
- files.put(family, new Pair(filteredSfis, tracker));
+ files.put(family, filteredSfis);
}
filteredSfis.add(sfi);
nbFiles++;
@@ -733,12 +732,11 @@ public class SplitTableRegionProcedure
final List<Future<Pair<Path, Path>>> futures = new ArrayList<Future<Pair<Path, Path>>>(nbFiles);
// Split each store file.
- for (Map.Entry<String, Pair<Collection<StoreFileInfo>, StoreFileTracker>> e : files
- .entrySet()) {
+ for (Map.Entry<String, Collection<StoreFileInfo>> e : files.entrySet()) {
byte[] familyName = Bytes.toBytes(e.getKey());
final ColumnFamilyDescriptor hcd = htd.getColumnFamily(familyName);
- Pair<Collection<StoreFileInfo>, StoreFileTracker> storeFilesAndTracker = e.getValue();
- final Collection<StoreFileInfo> storeFiles = storeFilesAndTracker.getFirst();
+ Collection<StoreFileInfo> storeFileInfos = e.getValue();
+ final Collection<StoreFileInfo> storeFiles = storeFileInfos;
if (storeFiles != null && storeFiles.size() > 0) {
final Configuration storeConfiguration =
StoreUtils.createStoreConfiguration(env.getMasterConfiguration(), htd, hcd);
@@ -749,9 +747,8 @@ public class SplitTableRegionProcedure
// is running in a regionserver's Store context, or we might not be able
// to read the hfiles.
storeFileInfo.setConf(storeConfiguration);
- StoreFileSplitter sfs =
- new StoreFileSplitter(regionFs, storeFilesAndTracker.getSecond(), familyName,
- new HStoreFile(storeFileInfo, hcd.getBloomFilterType(), CacheConfig.DISABLED));
+ StoreFileSplitter sfs = new StoreFileSplitter(regionFs, htd, hcd,
+ new HStoreFile(storeFileInfo, hcd.getBloomFilterType(), CacheConfig.DISABLED));
futures.add(threadPool.submit(sfs));
}
}
@@ -817,19 +814,27 @@ public class SplitTableRegionProcedure
}
}
- private Pair<Path, Path> splitStoreFile(HRegionFileSystem regionFs, StoreFileTracker tracker,
- byte[] family, HStoreFile sf) throws IOException {
+ private Pair<Path, Path> splitStoreFile(HRegionFileSystem regionFs, TableDescriptor htd,
+ ColumnFamilyDescriptor hcd, HStoreFile sf) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("pid=" + getProcId() + " splitting started for store file: " + sf.getPath()
+ " for region: " + getParentRegion().getShortNameToLog());
}
final byte[] splitRow = getSplitRow();
- final String familyName = Bytes.toString(family);
+ final String familyName = hcd.getNameAsString();
+ StoreFileTracker daughterOneSft =
+ StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, hcd,
+ HRegionFileSystem.create(regionFs.getFileSystem().getConf(), regionFs.getFileSystem(),
+ regionFs.getTableDir(), daughterOneRI));
+ StoreFileTracker daughterTwoSft =
+ StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, hcd,
+ HRegionFileSystem.create(regionFs.getFileSystem().getConf(), regionFs.getFileSystem(),
+ regionFs.getTableDir(), daughterTwoRI));
final Path path_first = regionFs.splitStoreFile(this.daughterOneRI, familyName, sf, splitRow,
- false, splitPolicy, tracker);
+ false, splitPolicy, daughterOneSft);
final Path path_second = regionFs.splitStoreFile(this.daughterTwoRI, familyName, sf, splitRow,
- true, splitPolicy, tracker);
+ true, splitPolicy, daughterTwoSft);
if (LOG.isDebugEnabled()) {
LOG.debug("pid=" + getProcId() + " splitting complete for store file: " + sf.getPath()
+ " for region: " + getParentRegion().getShortNameToLog());
@@ -843,27 +848,27 @@ public class SplitTableRegionProcedure
*/
private class StoreFileSplitter implements Callable<Pair<Path, Path>> {
private final HRegionFileSystem regionFs;
- private final byte[] family;
+ private final ColumnFamilyDescriptor hcd;
private final HStoreFile sf;
- private final StoreFileTracker tracker;
+ private final TableDescriptor htd;
/**
* Constructor that takes what it needs to split
* @param regionFs the file system
- * @param family Family that contains the store file
+ * @param hcd Family that contains the store file
* @param sf which file
*/
- public StoreFileSplitter(HRegionFileSystem regionFs, StoreFileTracker tracker, byte[] family,
- HStoreFile sf) {
+ public StoreFileSplitter(HRegionFileSystem regionFs, TableDescriptor htd,
+ ColumnFamilyDescriptor hcd, HStoreFile sf) {
this.regionFs = regionFs;
this.sf = sf;
- this.family = family;
- this.tracker = tracker;
+ this.hcd = hcd;
+ this.htd = htd;
}
@Override
public Pair<Path, Path> call() throws IOException {
- return splitStoreFile(regionFs, tracker, family, sf);
+ return splitStoreFile(regionFs, htd, hcd, sf);
}
}
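Condensed from the SplitTableRegionProcedure hunks above, for readability: the splitter no longer receives a single tracker; it builds one StoreFileTracker per daughter region and passes it to splitStoreFile(), so each daughter's split output is created through that daughter's own tracker. A sketch using the names visible in the hunks (regionFs, htd, hcd, sf, splitRow, splitPolicy, daughterOneRI):

  StoreFileTracker daughterOneSft =
    StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, hcd,
      HRegionFileSystem.create(regionFs.getFileSystem().getConf(), regionFs.getFileSystem(),
        regionFs.getTableDir(), daughterOneRI));
  Path daughterOnePath = regionFs.splitStoreFile(daughterOneRI, hcd.getNameAsString(), sf,
    splitRow, false, splitPolicy, daughterOneSft);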
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index cc7b447f3de..6f1ba4f6b40 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -684,8 +684,7 @@ public class HRegionFileSystem {
hfileName = m.group(4);
}
// must create back reference here
- HFileLink.create(conf, fs, splitDir, familyName, hri.getTable().getNameAsString(),
- hri.getEncodedName(), linkedTable, linkedRegion, hfileName, true);
+ tracker.createHFileLink(linkedTable, linkedRegion, hfileName, true);
Path path =
new Path(splitDir, HFileLink.createHFileLinkName(linkedTable, linkedRegion, hfileName));
LOG.info("Created linkFile:" + path.toString() + " for child: " + hri.getEncodedName()
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java
index 12343b50dd3..7023ff5115a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTracker.java
@@ -22,6 +22,7 @@ import java.util.Collection;
import java.util.List;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
@@ -119,4 +120,30 @@ public interface StoreFileTracker {
StoreFileInfo getStoreFileInfo(final Path initialPath, final boolean primaryReplica)
throws IOException;
+ /**
+ * Create a new HFileLink
+ * <p>
+ * It also adds a back-reference to the hfile back-reference directory to simplify the
+ * reference-count and the cleaning process.
+ * @param linkedTable - Linked Table Name
+ * @param linkedRegion - Linked Region Name
+ * @param hfileName - Linked HFile name
+ * @param createBackRef - Whether back reference should be created. Defaults to true.
+ * @return the file link name.
+ * @throws IOException on file or parent directory creation failure.
+ */
+ String createHFileLink(final TableName linkedTable, final String linkedRegion,
+ final String hfileName, final boolean createBackRef) throws IOException;
+
+ /**
+ * Create a new HFileLink starting from a hfileLink name
+ * <p>
+ * It also adds a back-reference to the hfile back-reference directory to simplify the
+ * reference-count and the cleaning process.
+ * @param hfileLinkName - HFileLink name (it contains hfile-region-table)
+ * @param createBackRef - Whether back reference should be created. Defaults to true.
+ * @return the file link name.
+ * @throws IOException on file or parent directory creation failure.
+ */
+ String createFromHFileLink(final String hfileLinkName, final boolean createBackRef)
+ throws IOException;
+
}
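The two methods added to the interface mirror the deleted static helpers, minus the Configuration, FileSystem and destination-path arguments, which the tracker now resolves from its own StoreContext. A minimal usage sketch, assuming a tracker built as in the tests further down (sourceRegion and hfileName are illustrative names):

  // Link to an hfile of another (typically source/archived) region:
  String linkName = tracker.createHFileLink(sourceRegion.getTable(),
    sourceRegion.getEncodedName(), hfileName, true);
  // Or, when a pre-formatted link name is already at hand (e.g. from a snapshot manifest):
  tracker.createFromHFileLink(linkName, true);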
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java
index 5d0b5b4ae08..33b294ac89b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerBase.java
@@ -25,12 +25,14 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.List;
+import java.util.regex.Matcher;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.HFileLink;
@@ -48,6 +50,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -319,6 +322,57 @@ abstract class StoreFileTrackerBase implements StoreFileTracker {
isPrimaryReplica);
}
+ public String createHFileLink(final TableName linkedTable, final String linkedRegion,
+ final String hfileName, final boolean createBackRef) throws IOException {
+ String name = HFileLink.createHFileLinkName(linkedTable, linkedRegion, hfileName);
+ String refName = HFileLink.createBackReferenceName(ctx.getTableName().toString(),
+ ctx.getRegionInfo().getEncodedName());
+
+ FileSystem fs = ctx.getRegionFileSystem().getFileSystem();
+ // Make sure the destination directory exists
+ fs.mkdirs(ctx.getFamilyStoreDirectoryPath());
+
+ // Make sure the FileLink reference directory exists
+ Path archiveStoreDir = HFileArchiveUtil.getStoreArchivePath(conf, linkedTable, linkedRegion,
+ ctx.getFamily().getNameAsString());
+ Path backRefPath = null;
+ if (createBackRef) {
+ Path backRefssDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName);
+ fs.mkdirs(backRefssDir);
+
+ // Create the reference for the link
+ backRefPath = new Path(backRefssDir, refName);
+ fs.createNewFile(backRefPath);
+ }
+ try {
+ // Create the link
+ if (fs.createNewFile(new Path(ctx.getFamilyStoreDirectoryPath(), name))) {
+ return name;
+ }
+ } catch (IOException e) {
+ LOG.error("couldn't create the link=" + name + " for " + ctx.getFamilyStoreDirectoryPath(),
+ e);
+ // Revert the reference if the link creation failed
+ if (createBackRef) {
+ fs.delete(backRefPath, false);
+ }
+ throw e;
+ }
+ throw new IOException("File link=" + name + " already exists under "
+ + ctx.getFamilyStoreDirectoryPath() + " folder.");
+
+ }
+
+ public String createFromHFileLink(final String hfileLinkName, final boolean createBackRef)
+ throws IOException {
+ Matcher m = HFileLink.LINK_NAME_PATTERN.matcher(hfileLinkName);
+ if (!m.matches()) {
+ throw new IllegalArgumentException(hfileLinkName + " is not a valid HFileLink name!");
+ }
+ return createHFileLink(TableName.valueOf(m.group(1), m.group(2)), m.group(3), m.group(4),
+ createBackRef);
+ }
+
/**
* For primary replica, we will call load once when opening a region, and the implementation could
* choose to do some cleanup work. So here we use {@code readOnly} to indicate that whether you
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
index 4e20218a57e..a24891483eb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.HFileArchiver;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -668,8 +669,13 @@ public class RestoreSnapshotHelper {
Configuration sftConf = StoreUtils.createStoreConfiguration(conf, tableDesc,
tableDesc.getColumnFamily(familyFiles.getFamilyName().toByteArray()));
StoreFileTracker tracker =
- StoreFileTrackerFactory.create(sftConf, true, StoreContext.getBuilder()
- .withFamilyStoreDirectoryPath(familyDir).withRegionFileSystem(regionFS).build());
+ StoreFileTrackerFactory
+ .create(sftConf, true,
+ StoreContext.getBuilder().withFamilyStoreDirectoryPath(familyDir)
+ .withRegionFileSystem(regionFS)
+ .withColumnFamilyDescriptor(
+ ColumnFamilyDescriptorBuilder.of(familyFiles.getFamilyName().toByteArray()))
+ .build());
for (SnapshotRegionManifest.StoreFile storeFile : familyFiles.getStoreFilesList()) {
LOG.info("Adding HFileLink " + storeFile.getName() + " from cloned region " + "in snapshot "
+ snapshotName + " to table=" + tableName);
@@ -726,11 +732,12 @@ public class RestoreSnapshotHelper {
final StoreFileTracker tracker) throws IOException {
String hfileName = storeFile.getName();
if (HFileLink.isHFileLink(hfileName)) {
- return HFileLink.createFromHFileLink(conf, fs, familyDir, hfileName, createBackRef);
+ return tracker.createFromHFileLink(hfileName, createBackRef);
} else if (StoreFileInfo.isReference(hfileName)) {
return restoreReferenceFile(familyDir, regionInfo, storeFile, tracker);
} else {
- return HFileLink.create(conf, fs, familyDir, regionInfo, hfileName, createBackRef);
+ return tracker.createHFileLink(regionInfo.getTable(), regionInfo.getEncodedName(), hfileName,
+ createBackRef);
}
}
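Note the new withColumnFamilyDescriptor(...) call when RestoreSnapshotHelper builds the tracker: the default createHFileLink() in StoreFileTrackerBase derives the back-reference archive directory from ctx.getFamily(), so a tracker that is used for link creation now has to carry the family descriptor in its StoreContext. A sketch of that wiring, reusing names from the hunk above (sftConf, familyDir, regionFS; the familyName bytes are illustrative):

  StoreFileTracker tracker = StoreFileTrackerFactory.create(sftConf, true,
    StoreContext.getBuilder()
      .withFamilyStoreDirectoryPath(familyDir)
      .withRegionFileSystem(regionFS)
      .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName))
      .build());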
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
index c7e9166b54a..5d0509ac3d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
@@ -50,6 +50,8 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -680,7 +682,9 @@ public class RegionSplitter {
// Check every Column Family for that region -- check does not have references.
boolean refFound = false;
for (ColumnFamilyDescriptor c : htd.getColumnFamilies()) {
- if ((refFound = regionFs.hasReferences(c.getNameAsString()))) {
+ StoreFileTracker sft = StoreFileTrackerFactory
+ .create(regionFs.getFileSystem().getConf(), htd, c, regionFs);
+ if ((refFound = sft.hasReferences())) {
break;
}
}
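Condensed from the hunk above: RegionSplitter no longer asks HRegionFileSystem directly whether a family still has references; it builds the tracker configured for that family and asks it, presumably so the check respects whichever SFT implementation the table uses. Sketch, with the names used in the hunk:

  for (ColumnFamilyDescriptor c : htd.getColumnFamilies()) {
    StoreFileTracker sft =
      StoreFileTrackerFactory.create(regionFs.getFileSystem().getConf(), htd, c, regionFs);
    if (sft.hasReferences()) {
      // this daughter still references its parent region; not safe to act on it yet
      break;
    }
  }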
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
index 096abd80978..73b0f527981 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
@@ -473,14 +473,15 @@ public class TestPrefetch {
HRegionFileSystem dstRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
CommonFSUtils.getTableDir(testDir, dstHri.getTable()), dstHri);
Path dstPath = new Path(regionFs.getTableDir(), new Path(dstHri.getRegionNameAsString(), "cf"));
- HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
- Path linkFilePath =
- new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
-
StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, false, StoreContext.getBuilder()
.withFamilyStoreDirectoryPath(new Path(dstRegionFs.getRegionDir(), "cf"))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of("cf"))
.withRegionFileSystem(dstRegionFs).build());
+ sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true);
+ Path linkFilePath =
+ new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+
// Try to open store file from link
StoreFileInfo storeFileInfo = sft.getStoreFileInfo(linkFilePath, true);
HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf);
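As the TestPrefetch change above shows, tests now create the link through the tracker and then read it back via the same tracker; condensed (all names taken from the test):

  sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true);
  Path linkFilePath =
    new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
  StoreFileInfo storeFileInfo = sft.getStoreFileInfo(linkFilePath, true);
  HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf);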
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
index 318927b2f77..62dc4a3e188 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
@@ -30,9 +30,14 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
+import org.apache.hadoop.hbase.regionserver.StoreContext;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@@ -121,7 +126,14 @@ public class TestHFileLinkCleaner {
hfilePath = new Path(familyPath, hfileName);
fs.createNewFile(hfilePath);
- createLink(true);
+ HRegionFileSystem regionFS = HRegionFileSystem.create(conf, fs,
+ CommonFSUtils.getTableDir(rootDir, tableLinkName), hriLink);
+ StoreFileTracker sft = StoreFileTrackerFactory.create(conf, true,
+ StoreContext.getBuilder()
+ .withFamilyStoreDirectoryPath(new Path(regionFS.getRegionDir(), familyName))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName))
+ .withRegionFileSystem(regionFS).build());
+ createLink(sft, true);
// Initialize cleaner
conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, TTL);
@@ -129,11 +141,12 @@ public class TestHFileLinkCleaner {
cleaner = new HFileCleaner(1000, server, conf, fs, archiveDir, POOL);
}
- private void createLink(boolean createBackReference) throws IOException {
+ private void createLink(StoreFileTracker sft, boolean createBackReference) throws IOException {
// Create link to hfile
familyLinkPath = getFamilyDirPath(rootDir, tableLinkName, hriLink.getEncodedName(), familyName);
fs.mkdirs(familyLinkPath);
- hfileLinkName = HFileLink.create(conf, fs, familyLinkPath, hri, hfileName, createBackReference);
+ hfileLinkName =
+ sft.createHFileLink(hri.getTable(), hri.getEncodedName(), hfileName, createBackReference);
linkBackRefDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName);
assertTrue(fs.exists(linkBackRefDir));
backRefs = fs.listStatus(linkBackRefDir);
@@ -162,6 +175,7 @@ public class TestHFileLinkCleaner {
public void testHFileLinkCleaning() throws Exception {
// Link backref cannot be removed
cleaner.chore();
+ // CommonFSUtils.
assertTrue(fs.exists(linkBackRef));
assertTrue(fs.exists(hfilePath));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
index ab9a9a82605..4b1e4b1d3f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.executor.ExecutorService;
@@ -41,6 +42,10 @@ import org.apache.hadoop.hbase.master.cleaner.DirScanPool;
import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
import org.apache.hadoop.hbase.master.cleaner.HFileLinkCleaner;
import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
+import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
+import org.apache.hadoop.hbase.regionserver.StoreContext;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -223,9 +228,14 @@ public class TestSnapshotManager {
Path hfilePath = new Path(familyPath, hfileName);
fs.createNewFile(hfilePath);
// Create link to hfile
- Path familyLinkPath =
- getFamilyDirPath(rootDir, tableLinkName, hriLink.getEncodedName(), familyName);
- HFileLink.create(conf, fs, familyLinkPath, hri, hfileName);
+ HRegionFileSystem regionFS = HRegionFileSystem.create(conf, fs,
+ CommonFSUtils.getTableDir(rootDir, tableLinkName), hriLink);
+ StoreFileTracker sft = StoreFileTrackerFactory.create(conf, true,
+ StoreContext.getBuilder()
+ .withFamilyStoreDirectoryPath(new Path(regionFS.getRegionDir(), familyName))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(familyName))
+ .withRegionFileSystem(regionFS).build());
+ sft.createHFileLink(hri.getTable(), hri.getEncodedName(), hfileName, true);
Path linkBackRefDir = HFileLink.getBackReferencesDir(archiveStoreDir, hfileName);
assertTrue(fs.exists(linkBackRefDir));
FileStatus[] backRefs = fs.listStatus(linkBackRefDir);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 66720e13dac..69f549dd6b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -168,6 +168,7 @@ public class TestHStoreFile {
StoreFileTracker sft = StoreFileTrackerFactory.create(conf, false, StoreContext.getBuilder()
.withFamilyStoreDirectoryPath(new Path(regionFs.getRegionDir(), TEST_FAMILY))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
.withRegionFileSystem(regionFs).build());
HStoreFile sf = new HStoreFile(this.fs, sfPath, conf, cacheConf, BloomType.NONE, true, sft);
checkHalfHFile(regionFs, sf, sft);
@@ -343,7 +344,6 @@ public class TestHStoreFile {
Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
Path dstPath =
new Path(regionFs.getTableDir(), new Path(dstHri.getRegionNameAsString(), TEST_FAMILY));
- HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
Path linkFilePath =
new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
@@ -355,7 +355,9 @@ public class TestHStoreFile {
StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, false, StoreContext.getBuilder()
.withFamilyStoreDirectoryPath(new Path(dstHri.getRegionNameAsString(), TEST_FAMILY))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
.withRegionFileSystem(dstRegionFs).build());
+ sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true);
StoreFileInfo storeFileInfo = sft.getStoreFileInfo(linkFilePath, true);
HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf);
assertTrue(storeFileInfo.isLink());
@@ -406,7 +408,6 @@ public class TestHStoreFile {
HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
CommonFSUtils.getTableDir(testDir, hri.getTable()), hriClone);
Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
- HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
Path linkFilePath =
new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
@@ -420,7 +421,9 @@ public class TestHStoreFile {
StoreFileTracker sft = StoreFileTrackerFactory.create(testConf, true, StoreContext.getBuilder()
.withFamilyStoreDirectoryPath(new Path(hriClone.getRegionNameAsString(), TEST_FAMILY))
+ .withColumnFamilyDescriptor(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
.withRegionFileSystem(cloneRegionFs).build());
+ sft.createHFileLink(hri.getTable(), hri.getEncodedName(), storeFilePath.getName(), true);
HRegionFileSystem splitRegionAFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
CommonFSUtils.getTableDir(testDir, splitHriA.getTable()), splitHriA);