http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 420d77f..c980ae6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -76,7 +76,7 @@ import org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-import org.apache.hadoop.hbase.fs.RegionFileSystem.StoreFileVisitor;
+import org.apache.hadoop.hbase.fs.RegionStorage.StoreFileVisitor;
 import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterRpcServices;
@@ -1033,11 +1033,11 @@ public class TestSplitTransactionOnCluster {
       assertEquals(storefiles.size(), 1);
       assertFalse(region.hasReferences());
       Path referencePath =
-          region.getRegionFileSystem().splitStoreFile(region.getRegionInfo(), "f",
+          region.getRegionStorage().splitStoreFile(region.getRegionInfo(), "f",
             storefiles.iterator().next(), Bytes.toBytes("row1"), false, region.getSplitPolicy());
       assertNull(referencePath);
       referencePath =
-          region.getRegionFileSystem().splitStoreFile(region.getRegionInfo(), "i_f",
+          region.getRegionStorage().splitStoreFile(region.getRegionInfo(), "i_f",
             storefiles.iterator().next(), Bytes.toBytes("row1"), false, region.getSplitPolicy());
       assertNotNull(referencePath);
     } finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 7f44324..4fc5a11 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -63,7 +63,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.fs.RegionFileSystem;
+import org.apache.hadoop.hbase.fs.RegionStorage;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -186,7 +186,7 @@ public class TestStore {
     final Configuration walConf = new Configuration(conf);
     FSUtils.setRootDir(walConf, basedir);
     final WALFactory wals = new WALFactory(walConf, null, methodName);
-    RegionFileSystem rfs = RegionFileSystem.open(conf, fs, basedir, info, false);
+    RegionStorage rfs = RegionStorage.open(conf, fs, basedir, info, false);
     HRegion region = new HRegion(rfs, htd,
         wals.getWAL(info.getEncodedNameAsBytes(), info.getTable().getNamespace()), null);

@@ -774,7 +774,7 @@ public class TestStore {
         LOG.info("Before flush, we should have no files");

         Collection<StoreFileInfo> files =
-          store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
+          store.getRegionStorage().getStoreFiles(store.getColumnFamilyName());
         Assert.assertEquals(0, files != null ? files.size() : 0);

         //flush
@@ -787,7 +787,7 @@ public class TestStore {
         }

         LOG.info("After failed flush, we should still have no files!");
-        files = store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
+        files = store.getRegionStorage().getStoreFiles(store.getColumnFamilyName());
         Assert.assertEquals(0, files != null ? files.size() : 0);
         store.getHRegion().getWAL().close();
         return null;
@@ -1028,7 +1028,7 @@ public class TestStore {
     for (int i = 0; i <= index; i++) {
       sf = it.next();
     }
-    store.getRegionFileSystem().removeStoreFiles(store.getColumnFamilyName(), Lists.newArrayList(sf));
+    store.getRegionStorage().removeStoreFiles(store.getColumnFamilyName(), Lists.newArrayList(sf));
   }

   private void closeCompactedFile(int index) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index e88a948..e5a9f00 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.fs.RegionFileSystem;
+import org.apache.hadoop.hbase.fs.RegionStorage;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -106,7 +106,7 @@ public class TestStoreFile extends HBaseTestCase {
   public void testBasicHalfMapFile() throws Exception {
     final HRegionInfo hri =
         new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
-    RegionFileSystem regionFs = RegionFileSystem.open(conf, fs, testDir, hri, true);
+    RegionStorage regionFs = RegionStorage.open(conf, fs, testDir, hri, true);

     HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
     StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
@@ -156,7 +156,7 @@ public class TestStoreFile extends HBaseTestCase {
   @Test
   public void testReference() throws IOException {
     final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testReferenceTb"));
-    RegionFileSystem regionFs = RegionFileSystem.open(conf, fs, testDir, hri, true);
+    RegionStorage regionFs = RegionStorage.open(conf, fs, testDir, hri, true);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -221,7 +221,7 @@ public class TestStoreFile extends HBaseTestCase {
     // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
     Configuration testConf = new Configuration(this.conf);
     FSUtils.setRootDir(testConf, testDir);
-    RegionFileSystem regionFs = RegionFileSystem.open(testConf, fs, testDir, hri, true);
+    RegionStorage regionFs = RegionStorage.open(testConf, fs, testDir, hri, true);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
@@ -265,7 +265,7 @@ public class TestStoreFile extends HBaseTestCase {
     // adding legal table name chars to verify regex handles it.
     HRegionInfo hri = new HRegionInfo(TableName.valueOf("_original-evil-name"));
-    RegionFileSystem regionFs = RegionFileSystem.open(testConf, fs, testDir, hri, true);
+    RegionStorage regionFs = RegionStorage.open(testConf, fs, testDir, hri, true);

     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
@@ -278,7 +278,7 @@ public class TestStoreFile extends HBaseTestCase {

     // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
     HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
-    RegionFileSystem cloneRegionFs = RegionFileSystem.open(testConf, fs, testDir, hriClone, true);
+    RegionStorage cloneRegionFs = RegionStorage.open(testConf, fs, testDir, hriClone, true);
     Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
     HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
     Path linkFilePath = new Path(dstPath,
@@ -331,7 +331,7 @@ public class TestStoreFile extends HBaseTestCase {
     assertEquals((LAST_CHAR - FIRST_CHAR + 1) * (LAST_CHAR - FIRST_CHAR + 1), count);
   }

-  private void checkHalfHFile(final RegionFileSystem regionFs, final StoreFile f)
+  private void checkHalfHFile(final RegionStorage regionFs, final StoreFile f)
       throws IOException {
     Cell midkey = f.createReader().midkey();
     KeyValue midKV = (KeyValue)midkey;
@@ -995,7 +995,7 @@ public class TestStoreFile extends HBaseTestCase {
     assertEquals(startEvicted, cs.getEvictedCount());
   }

-  private Path splitStoreFile(final RegionFileSystem regionFs, final HRegionInfo hri,
+  private Path splitStoreFile(final RegionStorage regionFs, final HRegionInfo hri,
       final String family, final StoreFile sf, final byte[] splitKey, boolean isTopRef)
       throws IOException {
     FileSystem fs = regionFs.getFileSystem();

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
index 7435567..de193cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.fs.RegionFileSystem;
-import org.apache.hadoop.hbase.fs.legacy.LegacyRegionFileSystem;
+import org.apache.hadoop.hbase.fs.RegionStorage;
+import org.apache.hadoop.hbase.fs.legacy.LegacyRegionStorage;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -81,9 +81,9 @@ public class TestStoreFileRefresherChore {
     return htd;
   }

-  static class FailingHRegionFileSystem extends LegacyRegionFileSystem {
+  static class FailingHRegionStorage extends LegacyRegionStorage {
     boolean fail = false;
-    FailingHRegionFileSystem(Configuration conf, FileSystem fs, Path tableDir, HRegionInfo regionInfo) {
+    FailingHRegionStorage(Configuration conf, FileSystem fs, Path tableDir, HRegionInfo regionInfo) {
       super(conf, fs, tableDir, regionInfo);
     }

@@ -103,7 +103,7 @@ public class TestStoreFileRefresherChore {
     HRegionInfo info = new HRegionInfo(htd.getTableName(), startKey, stopKey, false, 0, replicaId);

-    RegionFileSystem fs = new FailingHRegionFileSystem(conf, tableDir.getFileSystem(conf), tableDir,
+    RegionStorage fs = new FailingHRegionStorage(conf, tableDir.getFileSystem(conf), tableDir,
       info);
     final Configuration walConf = new Configuration(conf);
     FSUtils.setRootDir(walConf, tableDir);
@@ -194,7 +194,7 @@ public class TestStoreFileRefresherChore {
     verifyData(replica1, 0, 100, qf, families);

     // simulate an fs failure where we cannot refresh the store files for the replica
-    ((FailingHRegionFileSystem)((HRegion)replica1).getRegionFileSystem()).fail = true;
+    ((FailingHRegionStorage)((HRegion)replica1).getRegionStorage()).fail = true;

     // write some more data to primary and flush
     putData(primary, 100, 100, qf, families);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
index faa539e..73ce47c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java
@@ -643,7 +643,7 @@ public abstract class AbstractTestWALReplay {
     for (HColumnDescriptor hcd: htd.getFamilies()) {
       cf_count++;
       if (cf_count == 2) {
-        region.getRegionFileSystem().deleteFamily(hcd.getNameAsString());
+        region.getRegionStorage().deleteFamily(hcd.getNameAsString());
       }
     }

@@ -963,7 +963,7 @@ public abstract class AbstractTestWALReplay {
     final int countPerFamily = 10;
     final HTableDescriptor htd = createBasic1FamilyHTD(tableName);
     HRegion region1 = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
-    Path regionDir = region1.getRegionFileSystem().getRegionDir();
+    Path regionDir = region1.getRegionStorage().getRegionDir();
     HBaseTestingUtility.closeRegionAndWAL(region1);

     WAL wal = createWAL(this.conf, hbaseRootDir, logName);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index d943ccd..d8ceafb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -56,7 +56,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.fs.MasterFileSystem;
-import org.apache.hadoop.hbase.fs.RegionFileSystem;
+import org.apache.hadoop.hbase.fs.RegionStorage;
 import org.apache.hadoop.hbase.fs.legacy.LegacyTableDescriptor;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.master.HMaster;
@@ -701,7 +701,7 @@ public final class SnapshotTestingUtils {

         // First region, simple with one plain hfile.
         HRegionInfo hri = new HRegionInfo(htd.getTableName(), startKey, endKey);
-        RegionFileSystem rfs = RegionFileSystem.open(conf, fs, tableDir, hri, true);
+        RegionStorage rfs = RegionStorage.open(conf, fs, tableDir, hri, true);
         regions[i] = new RegionData(tableDir, hri, 3);
         for (int j = 0; j < regions[i].files.length; ++j) {
           Path storeFile = createStoreFile(rfs.createTempName());
@@ -713,7 +713,7 @@ public final class SnapshotTestingUtils {
         startKey = Bytes.toBytes(2 + i * 2);
         endKey = Bytes.toBytes(3 + i * 2);
         hri = new HRegionInfo(htd.getTableName());
-        rfs = RegionFileSystem.open(conf, fs, tableDir, hri, true);
+        rfs = RegionStorage.open(conf, fs, tableDir, hri, true);
         regions[i+1] = new RegionData(tableDir, hri, regions[i].files.length);
         for (int j = 0; j < regions[i].files.length; ++j) {
           String refName = regions[i].files[j].getName() + '.' + regions[i].hri.getEncodedName();

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
index 7b5aed5..ab974e76 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
@@ -227,7 +227,7 @@ public class HFileArchiveTestingUtil {
   public static Path getStoreArchivePath(Configuration conf, HRegion region, Store store)
       throws IOException {
     return HFileArchiveUtil.getStoreArchivePath(conf, region.getRegionInfo(),
-        region.getRegionFileSystem().getTableDir(), store.getFamily().getName());
+        region.getRegionStorage().getTableDir(), store.getFamily().getName());
   }

   public static Path getStoreArchivePath(HBaseTestingUtility util, String tableName,

http://git-wip-us.apache.org/repos/asf/hbase/blob/9fddf3cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index f5c2ac4..4697aa1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.fs.RegionFileSystem;
+import org.apache.hadoop.hbase.fs.RegionStorage;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -206,7 +206,7 @@ public class OfflineMetaRebuildTestCore {
     HRegionInfo hri = new HRegionInfo(htbl.getName(), startKey, endKey);

     LOG.info("manually adding regioninfo and hdfs data: " + hri.toString());
-    RegionFileSystem rfs = RegionFileSystem.open(conf, hri, true);
+    RegionStorage rfs = RegionStorage.open(conf, hri, true);

     // add to meta.
     MetaTableAccessor.addRegionToMeta(meta, hri);
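
For reference, the following is a minimal, illustrative Java sketch (not part of the patch above) of the renamed RegionStorage calls that the updated tests exercise; the table name, column family, and root directory below are made-up example values, and only calls visible in the diff (RegionStorage.open and getStoreFiles) are used.

import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.fs.RegionStorage;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;

public class RegionStorageUsageSketch {
  // Opens region storage the same way the updated tests do and counts the
  // store files of one column family. Example values only.
  static int countStoreFiles(Configuration conf, FileSystem fs, Path rootDir) throws Exception {
    HRegionInfo hri = new HRegionInfo(TableName.valueOf("exampleTable"));
    RegionStorage rfs = RegionStorage.open(conf, fs, rootDir, hri, true);
    Collection<StoreFileInfo> files = rfs.getStoreFiles("f");
    return files != null ? files.size() : 0;
  }
}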