[1/2] hbase git commit: HBASE-13658 Improve the test run time for TestAccessController class (Ashish Singhi)

2015-05-22 Thread ssrungarapu
Repository: hbase
Updated Branches:
  refs/heads/master c2d73f02e -> 766298079


http://git-wip-us.apache.org/repos/asf/hbase/blob/76629807/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 65ca0be..f2d3dff 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -106,14 +106,10 @@ import 
org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.SecurityTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
-import org.apache.hadoop.hbase.util.TestTableName;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -137,7 +133,7 @@ public class TestAccessController extends SecureTestUtil {
 Logger.getLogger(TableAuthManager.class).setLevel(Level.TRACE);
   }
 
-  @Rule public TestTableName TEST_TABLE = new TestTableName();
+  private static TableName TEST_TABLE = TableName.valueOf("testtable1");
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
   private static Configuration conf;
 
@@ -176,7 +172,7 @@ public class TestAccessController extends SecureTestUtil {
   private static MasterCoprocessorEnvironment CP_ENV;
   private static AccessController ACCESS_CONTROLLER;
   private static RegionServerCoprocessorEnvironment RSCP_ENV;
-  private RegionCoprocessorEnvironment RCP_ENV;
+  private static RegionCoprocessorEnvironment RCP_ENV;
 
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
@@ -219,25 +215,24 @@ public class TestAccessController extends SecureTestUtil {
USER_ADMIN_CF = User.createUserForTesting(conf, "col_family_admin", new String[0]);
 
 systemUserConnection = TEST_UTIL.getConnection();
+setUpTableAndUserPermissions();
   }
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+cleanUp();
 TEST_UTIL.shutdownMiniCluster();
   }
 
-  @Before
-  public void setUp() throws Exception {
-// Create the test table (owner added to the _acl_ table)
-Admin admin = TEST_UTIL.getHBaseAdmin();
-HTableDescriptor htd = new HTableDescriptor(TEST_TABLE.getTableName());
+  private static void setUpTableAndUserPermissions() throws Exception {
+HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
 HColumnDescriptor hcd = new HColumnDescriptor(TEST_FAMILY);
 hcd.setMaxVersions(100);
 htd.addFamily(hcd);
 htd.setOwner(USER_OWNER);
createTable(TEST_UTIL, htd, new byte[][] { Bytes.toBytes("s") });
 
-Region region = 
TEST_UTIL.getHBaseCluster().getRegions(TEST_TABLE.getTableName()).get(0);
+Region region = TEST_UTIL.getHBaseCluster().getRegions(TEST_TABLE).get(0);
 RegionCoprocessorHost rcpHost = region.getCoprocessorHost();
 RCP_ENV = rcpHost.createEnvironment(AccessController.class, 
ACCESS_CONTROLLER,
   Coprocessor.PRIORITY_HIGHEST, 1, conf);
@@ -251,26 +246,26 @@ public class TestAccessController extends SecureTestUtil {
   Permission.Action.WRITE);
 
 grantOnTable(TEST_UTIL, USER_RW.getShortName(),
-  TEST_TABLE.getTableName(), TEST_FAMILY, null,
+  TEST_TABLE, TEST_FAMILY, null,
   Permission.Action.READ,
   Permission.Action.WRITE);
 
 // USER_CREATE is USER_RW plus CREATE permissions
 grantOnTable(TEST_UTIL, USER_CREATE.getShortName(),
-  TEST_TABLE.getTableName(), null, null,
+  TEST_TABLE, null, null,
   Permission.Action.CREATE,
   Permission.Action.READ,
   Permission.Action.WRITE);
 
 grantOnTable(TEST_UTIL, USER_RO.getShortName(),
-  TEST_TABLE.getTableName(), TEST_FAMILY, null,
+  TEST_TABLE, TEST_FAMILY, null,
   Permission.Action.READ);
 
 grantOnTable(TEST_UTIL, USER_ADMIN_CF.getShortName(),
-  TEST_TABLE.getTableName(), TEST_FAMILY,
+  TEST_TABLE, TEST_FAMILY,
   null, Permission.Action.ADMIN, Permission.Action.CREATE);
 
-assertEquals(5, AccessControlLists.getTablePermissions(conf, 
TEST_TABLE.getTableName()).size());
+assertEquals(5, AccessControlLists.getTablePermissions(conf, 
TEST_TABLE).size());
 try {
   assertEquals(5, 
AccessControlClient.getUserPermissions(systemUserConnection,
   TEST_TABLE.toString()).size());
@@ -279,21 +274,20 @@ public class TestAccessController extends SecureTestUtil {
 }
   }
 
-  @After
-  public void 

[2/2] hbase git commit: HBASE-13658 Improve the test run time for TestAccessController class (Ashish Singhi)

2015-05-22 Thread ssrungarapu
HBASE-13658 Improve the test run time for TestAccessController class (Ashish 
Singhi)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/76629807
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/76629807
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/76629807

Branch: refs/heads/master
Commit: 7662980798c82a239c4d446c290595836ed3a456
Parents: c2d73f0
Author: Srikanth Srungarapu ssrungar...@cloudera.com
Authored: Fri May 22 14:25:16 2015 -0700
Committer: Srikanth Srungarapu ssrungar...@cloudera.com
Committed: Fri May 22 14:25:16 2015 -0700

--
 .../security/access/TestAccessController.java   | 1385 +-
 1 file changed, 707 insertions(+), 678 deletions(-)
--




hbase git commit: HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li xiang)

2015-05-22 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 47c0105d6 -> b53db01df


HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li 
xiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b53db01d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b53db01d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b53db01d

Branch: refs/heads/branch-1.1
Commit: b53db01dfab7c68de12825ad73a236a3337e449d
Parents: 47c0105
Author: tedyu yuzhih...@gmail.com
Authored: Fri May 22 19:04:58 2015 -0700
Committer: tedyu yuzhih...@gmail.com
Committed: Fri May 22 19:04:58 2015 -0700

--
 .../java/org/apache/hadoop/hbase/mapreduce/Driver.java  | 12 +---
 .../apache/hadoop/hbase/snapshot/ExportSnapshot.java|  2 ++
 2 files changed, 11 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b53db01d/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
index 4371894..9737b55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
+import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
 import org.apache.hadoop.util.ProgramDriver;
 
 /**
@@ -37,21 +38,26 @@ public class Driver {
*/
   public static void main(String[] args) throws Throwable {
 ProgramDriver pgd = new ProgramDriver();
+
 pgd.addClass(RowCounter.NAME, RowCounter.class,
-  "Count rows in HBase table");
+  "Count rows in HBase table.");
 pgd.addClass(CellCounter.NAME, CellCounter.class,
-  "Count cells in HBase table");
+  "Count cells in HBase table.");
 pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
 pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
 pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
 pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,
  "Complete a bulk data load.");
 pgd.addClass(CopyTable.NAME, CopyTable.class,
-"Export a table from local cluster to peer cluster");
+"Export a table from local cluster to peer cluster.");
 pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
  " the data from tables in two different clusters. WARNING: It" +
  " doesn't work for incrementColumnValues'd cells since the" +
  " timestamp is changed after being appended to the log.");
+pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
+pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
+ " the specific snapshot to a given FileSystem.");
+
 ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
   invoke(pgd, new Object[]{args});
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53db01d/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 2655e2b..5021c74 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -86,6 +86,8 @@ import org.apache.hadoop.util.ToolRunner;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class ExportSnapshot extends Configured implements Tool {
+  public static final String NAME = "exportsnapshot";
+
   private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
 
   private static final String MR_NUM_MAPS = "mapreduce.job.maps";



hbase git commit: HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li xiang)

2015-05-22 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 766298079 -> e93e1ea73


HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li 
xiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e93e1ea7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e93e1ea7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e93e1ea7

Branch: refs/heads/master
Commit: e93e1ea73c6ce5f632636c613068ddd877ebb60d
Parents: 7662980
Author: tedyu yuzhih...@gmail.com
Authored: Fri May 22 19:32:51 2015 -0700
Committer: tedyu yuzhih...@gmail.com
Committed: Fri May 22 19:32:51 2015 -0700

--
 .../java/org/apache/hadoop/hbase/mapreduce/Driver.java  | 12 +---
 .../apache/hadoop/hbase/snapshot/ExportSnapshot.java|  2 ++
 2 files changed, 11 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e93e1ea7/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
index 4371894..9737b55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
+import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
 import org.apache.hadoop.util.ProgramDriver;
 
 /**
@@ -37,21 +38,26 @@ public class Driver {
*/
   public static void main(String[] args) throws Throwable {
 ProgramDriver pgd = new ProgramDriver();
+
 pgd.addClass(RowCounter.NAME, RowCounter.class,
-  "Count rows in HBase table");
+  "Count rows in HBase table.");
 pgd.addClass(CellCounter.NAME, CellCounter.class,
-  "Count cells in HBase table");
+  "Count cells in HBase table.");
 pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
 pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
 pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
 pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,
  "Complete a bulk data load.");
 pgd.addClass(CopyTable.NAME, CopyTable.class,
-"Export a table from local cluster to peer cluster");
+"Export a table from local cluster to peer cluster.");
 pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
  " the data from tables in two different clusters. WARNING: It" +
  " doesn't work for incrementColumnValues'd cells since the" +
  " timestamp is changed after being appended to the log.");
+pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
+pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
+ " the specific snapshot to a given FileSystem.");
+
 ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
   invoke(pgd, new Object[]{args});
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/e93e1ea7/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 2655e2b..5021c74 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -86,6 +86,8 @@ import org.apache.hadoop.util.ToolRunner;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class ExportSnapshot extends Configured implements Tool {
+  public static final String NAME = "exportsnapshot";
+
   private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
 
   private static final String MR_NUM_MAPS = "mapreduce.job.maps";



hbase git commit: HBASE-13752 Temporarily disable TestCorruptedRegionStoreFile on 0.98

2015-05-22 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 24d1d390b -> ac3536ce2


HBASE-13752 Temporarily disable TestCorruptedRegionStoreFile on 0.98


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ac3536ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ac3536ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ac3536ce

Branch: refs/heads/0.98
Commit: ac3536ce2c396ba74e994ae294d55a59e03a45a8
Parents: 24d1d39
Author: Andrew Purtell apurt...@apache.org
Authored: Fri May 22 16:41:11 2015 -0700
Committer: Andrew Purtell apurt...@apache.org
Committed: Fri May 22 16:41:11 2015 -0700

--
 .../hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java| 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ac3536ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
index d9d00f5..b0c7e2e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.util.TestTableName;
 
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -54,6 +55,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 @Category(LargeTests.class)
+@Ignore("See HBASE-13744")
 public class TestCorruptedRegionStoreFile {
   private static final Log LOG = 
LogFactory.getLog(TestCorruptedRegionStoreFile.class);
 



hbase git commit: HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li xiang)

2015-05-22 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/branch-1 a6cf9c51d -> ee5263db8


HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li 
xiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ee5263db
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ee5263db
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ee5263db

Branch: refs/heads/branch-1
Commit: ee5263db8cf38a30c6bd893a3cc0171dd02cd9bb
Parents: a6cf9c5
Author: Jerry He jerry...@apache.org
Authored: Fri May 22 12:40:27 2015 -0700
Committer: Jerry He jerry...@apache.org
Committed: Fri May 22 12:40:27 2015 -0700

--
 .../java/org/apache/hadoop/hbase/mapreduce/Driver.java  | 12 +---
 .../apache/hadoop/hbase/snapshot/ExportSnapshot.java|  2 ++
 2 files changed, 11 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ee5263db/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
index 4371894..9737b55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
+import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
 import org.apache.hadoop.util.ProgramDriver;
 
 /**
@@ -37,21 +38,26 @@ public class Driver {
*/
   public static void main(String[] args) throws Throwable {
 ProgramDriver pgd = new ProgramDriver();
+
 pgd.addClass(RowCounter.NAME, RowCounter.class,
-  "Count rows in HBase table");
+  "Count rows in HBase table.");
 pgd.addClass(CellCounter.NAME, CellCounter.class,
-  "Count cells in HBase table");
+  "Count cells in HBase table.");
 pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
 pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
 pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
 pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,
  "Complete a bulk data load.");
 pgd.addClass(CopyTable.NAME, CopyTable.class,
-"Export a table from local cluster to peer cluster");
+"Export a table from local cluster to peer cluster.");
 pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
  " the data from tables in two different clusters. WARNING: It" +
  " doesn't work for incrementColumnValues'd cells since the" +
  " timestamp is changed after being appended to the log.");
+pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
+pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
+ " the specific snapshot to a given FileSystem.");
+
 ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
   invoke(pgd, new Object[]{args});
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ee5263db/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 2655e2b..5021c74 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -86,6 +86,8 @@ import org.apache.hadoop.util.ToolRunner;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class ExportSnapshot extends Configured implements Tool {
+  public static final String NAME = "exportsnapshot";
+
   private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
 
   private static final String MR_NUM_MAPS = "mapreduce.job.maps";



hbase git commit: HBASE-13739 Remove KeyValueUtil.ensureKeyValue(cell) from MOB code.(Jingcheng)

2015-05-22 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/hbase-11339 5098f365b -> 132f65ea1


HBASE-13739 Remove KeyValueUtil.ensureKeyValue(cell) from MOB code.(Jingcheng)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/132f65ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/132f65ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/132f65ea

Branch: refs/heads/hbase-11339
Commit: 132f65ea1f7b4681809dd5e545b3b3b802f0c469
Parents: 5098f36
Author: anoopsjohn anoopsamj...@gmail.com
Authored: Fri May 22 20:15:22 2015 +0530
Committer: anoopsjohn anoopsamj...@gmail.com
Committed: Fri May 22 20:15:22 2015 +0530

--
 .../hadoop/hbase/mob/DefaultMobStoreFlusher.java  | 14 ++
 .../filecompactions/PartitionedMobFileCompactor.java  | 10 +++---
 .../hadoop/hbase/mob/mapreduce/MemStoreWrapper.java   |  5 +
 .../hadoop/hbase/mob/mapreduce/SweepReducer.java  |  4 ++--
 4 files changed, 12 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/132f65ea/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index 44387f5..608f4e2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -175,19 +174,18 @@ public class DefaultMobStoreFlusher extends 
DefaultStoreFlusher {
 // If we know that this KV is going to be included always, then 
let us
 // set its memstoreTS to 0. This will help us save space when 
writing to
 // disk.
-KeyValue kv = KeyValueUtil.ensureKeyValue(c);
-if (kv.getValueLength() <= mobCellValueSizeThreshold || MobUtils.isMobReferenceCell(kv)
-|| kv.getTypeByte() != KeyValue.Type.Put.getCode()) {
-  writer.append(kv);
+if (c.getValueLength() <= mobCellValueSizeThreshold || MobUtils.isMobReferenceCell(c)
+|| c.getTypeByte() != KeyValue.Type.Put.getCode()) {
+  writer.append(c);
 } else {
   // append the original keyValue in the mob file.
-  mobFileWriter.append(kv);
-  mobSize += kv.getValueLength();
+  mobFileWriter.append(c);
+  mobSize += c.getValueLength();
   mobCount++;
 
   // append the tags to the KeyValue.
   // The key is same, the value is the filename of the mob file
-  KeyValue reference = MobUtils.createMobRefKeyValue(kv, fileName, 
tableNameTag);
+  KeyValue reference = MobUtils.createMobRefKeyValue(c, fileName, 
tableNameTag);
   writer.append(reference);
 }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/132f65ea/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/filecompactions/PartitionedMobFileCompactor.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/filecompactions/PartitionedMobFileCompactor.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/filecompactions/PartitionedMobFileCompactor.java
index f02da48..e8729ce 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/filecompactions/PartitionedMobFileCompactor.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/filecompactions/PartitionedMobFileCompactor.java
@@ -361,12 +361,10 @@ public class PartitionedMobFileCompactor extends 
MobFileCompactor {
   do {
 hasMore = scanner.next(cells, scannerContext);
 for (Cell cell : cells) {
-  // TODO remove this after the new code are introduced.
-  KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
   // write the mob cell to the mob file.
-  writer.append(kv);
+  writer.append(cell);
   // write the new reference cell to the store file.
-  KeyValue reference = MobUtils.createMobRefKeyValue(kv, fileName, 
tableNameTag);
+  KeyValue reference = MobUtils.createMobRefKeyValue(cell, fileName, 
tableNameTag);
   refFileWriter.append(reference);

hbase git commit: HBASE-13745 Say why a flush was requested in log message

2015-05-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master f8893e001 -> c2d73f02e


HBASE-13745 Say why a flush was requested in log message


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c2d73f02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c2d73f02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c2d73f02

Branch: refs/heads/master
Commit: c2d73f02ebd8510d42862a15e2fa0633d76136e1
Parents: f8893e0
Author: stack st...@apache.org
Authored: Fri May 22 11:41:22 2015 -0700
Committer: stack st...@apache.org
Committed: Fri May 22 11:41:22 2015 -0700

--
 .../org/apache/hadoop/hbase/regionserver/HRegion.java   |  5 -
 .../apache/hadoop/hbase/regionserver/HRegionServer.java | 12 +++-
 .../hadoop/hbase/regionserver/TestDefaultMemStore.java  | 10 ++
 3 files changed, 17 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2d73f02/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 2c77697..ced6ccb 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1937,10 +1937,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   /**
* Should the memstore be flushed now
*/
-  boolean shouldFlush() {
+  boolean shouldFlush(final StringBuffer whyFlush) {
+whyFlush.setLength(0);
 // This is a rough measure.
 if (this.maxFlushedSeqId > 0
   && (this.maxFlushedSeqId + this.flushPerChanges < this.sequenceId.get())) {
+  whyFlush.append("more than max edits, " + this.flushPerChanges + ", since last flush");
   return true;
 }
 long modifiedFlushCheckInterval = flushCheckInterval;
@@ -1961,6 +1963,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 for (Store s : getStores()) {
   if (s.timeOfOldestEdit() < now - modifiedFlushCheckInterval) {
 // we have an old enough edit in the memstore, flush
+whyFlush.append(s.toString() + " has an old edit so flush to free WALs");
 return true;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c2d73f02/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 53a1094..fa56966 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -1553,15 +1553,17 @@ public class HRegionServer extends HasThread implements
 
 @Override
 protected void chore() {
+  final StringBuffer whyFlush = new StringBuffer();
   for (Region r : this.server.onlineRegions.values()) {
-if (r == null)
-  continue;
-if (((HRegion)r).shouldFlush()) {
+if (r == null) continue;
+if (((HRegion)r).shouldFlush(whyFlush)) {
   FlushRequester requester = server.getFlushRequester();
   if (requester != null) {
 long randomDelay = RandomUtils.nextInt(RANGE_OF_DELAY) + 
MIN_DELAY_TIME;
-LOG.info(getName() + " requesting flush for region " +
-  r.getRegionInfo().getRegionNameAsString() + " after a delay of " + randomDelay);
+LOG.info(getName() + " requesting flush of " +
+  r.getRegionInfo().getRegionNameAsString() + " because " +
+  whyFlush.toString() +
+  " after random delay " + randomDelay + "ms");
 //Throttle the flushes by putting a delay. If we don't throttle, 
and there
 //is a balanced write-load on the regions in a table, we might end 
up
 //overwhelming the filesystem with too many flushes at once.

http://git-wip-us.apache.org/repos/asf/hbase/blob/c2d73f02/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 18e3d22..7857590 100644
--- 

hbase git commit: HBASE-13745 Say why a flush was requested in log message

2015-05-22 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e5df9bb2e -> a6cf9c51d


HBASE-13745 Say why a flush was requested in log message


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a6cf9c51
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a6cf9c51
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a6cf9c51

Branch: refs/heads/branch-1
Commit: a6cf9c51d97851f33593a1df64d6c45d801314fd
Parents: e5df9bb
Author: stack st...@apache.org
Authored: Fri May 22 11:41:22 2015 -0700
Committer: stack st...@apache.org
Committed: Fri May 22 11:42:21 2015 -0700

--
 .../org/apache/hadoop/hbase/regionserver/HRegion.java   |  5 -
 .../apache/hadoop/hbase/regionserver/HRegionServer.java | 12 +++-
 .../hadoop/hbase/regionserver/TestDefaultMemStore.java  | 10 ++
 3 files changed, 17 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a6cf9c51/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 0ab397e..22fdc78 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1942,10 +1942,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   /**
* Should the memstore be flushed now
*/
-  boolean shouldFlush() {
+  boolean shouldFlush(final StringBuffer whyFlush) {
+whyFlush.setLength(0);
 // This is a rough measure.
 if (this.maxFlushedSeqId > 0
   && (this.maxFlushedSeqId + this.flushPerChanges < this.sequenceId.get())) {
+  whyFlush.append("more than max edits, " + this.flushPerChanges + ", since last flush");
   return true;
 }
 long modifiedFlushCheckInterval = flushCheckInterval;
@@ -1966,6 +1968,7 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 for (Store s : getStores()) {
   if (s.timeOfOldestEdit() < now - modifiedFlushCheckInterval) {
 // we have an old enough edit in the memstore, flush
+whyFlush.append(s.toString() + " has an old edit so flush to free WALs");
 return true;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6cf9c51/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 9e3af90..843c0a7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -1548,15 +1548,17 @@ public class HRegionServer extends HasThread implements
 
 @Override
 protected void chore() {
+  final StringBuffer whyFlush = new StringBuffer();
   for (Region r : this.server.onlineRegions.values()) {
-if (r == null)
-  continue;
-if (((HRegion)r).shouldFlush()) {
+if (r == null) continue;
+if (((HRegion)r).shouldFlush(whyFlush)) {
   FlushRequester requester = server.getFlushRequester();
   if (requester != null) {
 long randomDelay = RandomUtils.nextInt(RANGE_OF_DELAY) + 
MIN_DELAY_TIME;
-LOG.info(getName() + " requesting flush for region " +
-  r.getRegionInfo().getRegionNameAsString() + " after a delay of " + randomDelay);
+LOG.info(getName() + " requesting flush of " +
+  r.getRegionInfo().getRegionNameAsString() + " because " +
+  whyFlush.toString() +
+  " after random delay " + randomDelay + "ms");
 //Throttle the flushes by putting a delay. If we don't throttle, 
and there
 //is a balanced write-load on the regions in a table, we might end 
up
 //overwhelming the filesystem with too many flushes at once.

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6cf9c51/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java
index 7a405a4..d0c3b91 100644
--- 

hbase git commit: HBASE-13712 Backport HBASE-13199 to branch-1

2015-05-22 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 036c684e1 -> e8d8ca74e


HBASE-13712 Backport HBASE-13199 to branch-1

0.98 backport. Includes:
HBASE-13199 Some small improvements on canary tool (Shaohui Liu)
HBASE-13199 ADDENDUM Some small improvements on canary tool (Shaohui Liu)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e8d8ca74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e8d8ca74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e8d8ca74

Branch: refs/heads/0.98
Commit: e8d8ca74e553479efdd75f81f705c88a7ec66947
Parents: 036c684
Author: Andrew Purtell apurt...@apache.org
Authored: Fri May 22 12:13:27 2015 -0700
Committer: Andrew Purtell apurt...@apache.org
Committed: Fri May 22 12:13:27 2015 -0700

--
 .../org/apache/hadoop/hbase/tool/Canary.java| 496 +--
 1 file changed, 339 insertions(+), 157 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e8d8ca74/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 8107027..f55bc3c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -19,14 +19,23 @@
 
 package org.apache.hadoop.hbase.tool;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -46,10 +55,14 @@ import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -113,6 +126,171 @@ public final class Canary implements Tool {
 }
   }
 
+  /**
+   * For each column family of the region tries to get one row and outputs the 
latency, or the
+   * failure.
+   */
+  static class RegionTask implements Callable<Void> {
+private HConnection connection;
+private HRegionInfo region;
+private Sink sink;
+
+RegionTask(HConnection connection, HRegionInfo region, Sink sink) {
+  this.connection = connection;
+  this.region = region;
+  this.sink = sink;
+}
+
+@Override
+public Void call() {
+  HTableInterface table = null;
+  HTableDescriptor tableDesc = null;
+  try {
+table = connection.getTable(region.getTable());
+tableDesc = table.getTableDescriptor();
+  } catch (IOException e) {
+LOG.debug("sniffRegion failed", e);
+sink.publishReadFailure(region, e);
+if (table != null) {
+  try {
+table.close();
+  } catch (IOException ioe) {
+  }
+}
+return null;
+  }
+
+  byte[] startKey = null;
+  Get get = null;
+  Scan scan = null;
+  ResultScanner rs = null;
+  StopWatch stopWatch = new StopWatch();
+  for (HColumnDescriptor column : tableDesc.getColumnFamilies()) {
+stopWatch.reset();
+startKey = region.getStartKey();
+// Can't do a get on empty start row so do a Scan of first element if 
any instead.
+if (startKey.length > 0) {
+  get = new Get(startKey);
+  get.setCacheBlocks(false);
+  get.setFilter(new FirstKeyOnlyFilter());
+  get.addFamily(column.getName());
+} else {
+  scan = new Scan();
+  scan.setCaching(1);
+  scan.setCacheBlocks(false);
+  scan.setFilter(new FirstKeyOnlyFilter());
+  scan.addFamily(column.getName());
+  scan.setMaxResultSize(1L);
+}
+
+try {
+  if (startKey.length  

hbase git commit: HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li xiang)

2015-05-22 Thread jerryjch
Repository: hbase
Updated Branches:
  refs/heads/0.98 e8d8ca74e -> 24d1d390b


HBASE-13671 More classes to add to org.apache.hadoop.hbase.mapreduce.driver (li 
xiang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24d1d390
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24d1d390
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24d1d390

Branch: refs/heads/0.98
Commit: 24d1d390b1f02097c36f85037ef90f11c31bb108
Parents: e8d8ca7
Author: Jerry He jerry...@apache.org
Authored: Fri May 22 12:39:11 2015 -0700
Committer: Jerry He jerry...@apache.org
Committed: Fri May 22 12:39:11 2015 -0700

--
 .../java/org/apache/hadoop/hbase/mapreduce/Driver.java  | 12 +---
 .../apache/hadoop/hbase/snapshot/ExportSnapshot.java|  2 ++
 2 files changed, 11 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/24d1d390/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
index 01db92e..352d25e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
+import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
 import org.apache.hadoop.util.ProgramDriver;
 
 /**
@@ -36,21 +37,26 @@ public class Driver {
*/
   public static void main(String[] args) throws Throwable {
 ProgramDriver pgd = new ProgramDriver();
+
 pgd.addClass(RowCounter.NAME, RowCounter.class,
-  "Count rows in HBase table");
+  "Count rows in HBase table.");
 pgd.addClass(CellCounter.NAME, CellCounter.class,
-  "Count cells in HBase table");
+  "Count cells in HBase table.");
 pgd.addClass(Export.NAME, Export.class, "Write table data to HDFS.");
 pgd.addClass(Import.NAME, Import.class, "Import data written by Export.");
 pgd.addClass(ImportTsv.NAME, ImportTsv.class, "Import data in TSV format.");
 pgd.addClass(LoadIncrementalHFiles.NAME, LoadIncrementalHFiles.class,
  "Complete a bulk data load.");
 pgd.addClass(CopyTable.NAME, CopyTable.class,
-"Export a table from local cluster to peer cluster");
+"Export a table from local cluster to peer cluster.");
 pgd.addClass(VerifyReplication.NAME, VerifyReplication.class, "Compare" +
  " the data from tables in two different clusters. WARNING: It" +
  " doesn't work for incrementColumnValues'd cells since the" +
  " timestamp is changed after being appended to the log.");
+pgd.addClass(WALPlayer.NAME, WALPlayer.class, "Replay WAL files.");
+pgd.addClass(ExportSnapshot.NAME, ExportSnapshot.class, "Export" +
+ " the specific snapshot to a given FileSystem.");
+
 ProgramDriver.class.getMethod("driver", new Class [] {String[].class}).
   invoke(pgd, new Object[]{args});
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/24d1d390/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 5e2d0df..dedb931 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -91,6 +91,8 @@ import org.apache.hadoop.util.ToolRunner;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class ExportSnapshot extends Configured implements Tool {
+  public static final String NAME = "exportsnapshot";
+
   private static final Log LOG = LogFactory.getLog(ExportSnapshot.class);
 
   private static final String MR_NUM_MAPS = "mapreduce.job.maps";