[5/5] hbase git commit: HBASE-16190 IntegrationTestDDLMasterFailover failed with IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

2016-07-07 Thread enis
HBASE-16190 IntegrationTestDDLMasterFailover failed with 
IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

Conflicts:

hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d0756e30
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d0756e30
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d0756e30

Branch: refs/heads/branch-1.1
Commit: d0756e306e2b4e9c3ee49344a17a8f1c6239d037
Parents: dcc1243
Author: Enis Soztutar 
Authored: Thu Jul 7 16:34:08 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Jul 7 16:45:32 2016 -0700

--
 .../hbase/IntegrationTestDDLMasterFailover.java | 16 ++--
 1 file changed, 10 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d0756e30/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index b4a7647..24209ae 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -25,13 +25,17 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HBaseFsck;
@@ -202,12 +206,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // TableAction has implemented selectTable() shared by multiple table Actions
     protected HTableDescriptor selectTable(ConcurrentHashMap<TableName, HTableDescriptor> tableMap)
     {
-      // randomly select table from tableMap
-      if (tableMap.isEmpty()){
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (tableMap){
+        // randomly select table from tableMap
+        if (tableMap.isEmpty()) {
+          return null;
+        }
         ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);
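
A minimal sketch (not the HBase test itself; class and method names are made up) of the check-then-act race the patch closes: with the emptiness check outside the synchronized block, another thread can drain the map between the check and the draw, so the random bound becomes zero and fails the same non-positive-bound check that produced the IllegalArgumentException in the subject line. java.util.Random is used here only to keep the sketch dependency-free.

import java.util.ArrayList;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;

public class SelectRaceSketch {
  private static final Random RANDOM = new Random();

  // Buggy shape: the guard runs before the lock is taken, so the map can be
  // emptied by a concurrent delete action before the random index is drawn.
  static String selectUnsafe(ConcurrentHashMap<String, String> map) {
    if (map.isEmpty()) {                       // checked without the lock
      return null;
    }
    synchronized (map) {
      ArrayList<String> keys = new ArrayList<String>(map.keySet());
      return map.get(keys.get(RANDOM.nextInt(keys.size()))); // size() may be 0 here
    }
  }

  // Fixed shape, mirroring the patch: guard and draw happen under the same lock.
  static String selectSafe(ConcurrentHashMap<String, String> map) {
    synchronized (map) {
      if (map.isEmpty()) {
        return null;
      }
      ArrayList<String> keys = new ArrayList<String>(map.keySet());
      return map.get(keys.get(RANDOM.nextInt(keys.size())));
    }
  }
}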



[4/5] hbase git commit: HBASE-16190 IntegrationTestDDLMasterFailover failed with IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

2016-07-07 Thread enis
HBASE-16190 IntegrationTestDDLMasterFailover failed with 
IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

Conflicts:

hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/075f33a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/075f33a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/075f33a0

Branch: refs/heads/branch-1.2
Commit: 075f33a06ab27f35fc9d4e46b7bf2220d47992d1
Parents: 8f8736d
Author: Enis Soztutar 
Authored: Thu Jul 7 16:34:08 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Jul 7 16:43:53 2016 -0700

--
 .../hbase/IntegrationTestDDLMasterFailover.java | 16 ++--
 1 file changed, 10 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/075f33a0/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index b4a7647..24209ae 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -25,13 +25,17 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HBaseFsck;
@@ -202,12 +206,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // TableAction has implemented selectTable() shared by multiple table Actions
     protected HTableDescriptor selectTable(ConcurrentHashMap<TableName, HTableDescriptor> tableMap)
    {
-      // randomly select table from tableMap
-      if (tableMap.isEmpty()){
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (tableMap){
+        // randomly select table from tableMap
+        if (tableMap.isEmpty()) {
+          return null;
+        }
         ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);



[2/5] hbase git commit: HBASE-16190 IntegrationTestDDLMasterFailover failed with IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

2016-07-07 Thread enis
HBASE-16190 IntegrationTestDDLMasterFailover failed with 
IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ed07a38d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ed07a38d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ed07a38d

Branch: refs/heads/branch-1
Commit: ed07a38d594ac5871e53e20f5060d489ef440696
Parents: 74090fa
Author: Enis Soztutar 
Authored: Thu Jul 7 16:34:08 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Jul 7 16:34:13 2016 -0700

--
 .../hbase/IntegrationTestDDLMasterFailover.java | 24 
 1 file changed, 14 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ed07a38d/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index 47a3e4f..52a118a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -25,13 +25,17 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HBaseFsck;
@@ -246,12 +250,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // NamespaceAction has implemented selectNamespace() shared by multiple namespace Actions
     protected NamespaceDescriptor selectNamespace(
         ConcurrentHashMap<String, NamespaceDescriptor> namespaceMap) {
-      // randomly select namespace from namespaceMap
-      if (namespaceMap.isEmpty()) {
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (namespaceMap) {
+        // randomly select namespace from namespaceMap
+        if (namespaceMap.isEmpty()) {
+          return null;
+        }
         ArrayList<String> namespaceList = new ArrayList<String>(namespaceMap.keySet());
         String randomKey = namespaceList.get(RandomUtils.nextInt(namespaceList.size()));
         NamespaceDescriptor randomNsd = namespaceMap.get(randomKey);
@@ -386,12 +390,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // TableAction has implemented selectTable() shared by multiple table Actions
     protected HTableDescriptor selectTable(ConcurrentHashMap<TableName, HTableDescriptor> tableMap)
    {
-      // randomly select table from tableMap
-      if (tableMap.isEmpty()){
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (tableMap){
+        // randomly select table from tableMap
+        if (tableMap.isEmpty()) {
+          return null;
+        }
         ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);



[3/5] hbase git commit: HBASE-16190 IntegrationTestDDLMasterFailover failed with IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

2016-07-07 Thread enis
HBASE-16190 IntegrationTestDDLMasterFailover failed with 
IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ee63706e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ee63706e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ee63706e

Branch: refs/heads/branch-1.3
Commit: ee63706eed5fb3b72056c0228aae12104366076a
Parents: 1ac755e
Author: Enis Soztutar 
Authored: Thu Jul 7 16:34:08 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Jul 7 16:36:02 2016 -0700

--
 .../hbase/IntegrationTestDDLMasterFailover.java | 24 
 1 file changed, 14 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ee63706e/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index 47a3e4f..52a118a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -25,13 +25,17 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HBaseFsck;
@@ -246,12 +250,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // NamespaceAction has implemented selectNamespace() shared by multiple namespace Actions
     protected NamespaceDescriptor selectNamespace(
         ConcurrentHashMap<String, NamespaceDescriptor> namespaceMap) {
-      // randomly select namespace from namespaceMap
-      if (namespaceMap.isEmpty()) {
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (namespaceMap) {
+        // randomly select namespace from namespaceMap
+        if (namespaceMap.isEmpty()) {
+          return null;
+        }
         ArrayList<String> namespaceList = new ArrayList<String>(namespaceMap.keySet());
         String randomKey = namespaceList.get(RandomUtils.nextInt(namespaceList.size()));
         NamespaceDescriptor randomNsd = namespaceMap.get(randomKey);
@@ -386,12 +390,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // TableAction has implemented selectTable() shared by multiple table Actions
     protected HTableDescriptor selectTable(ConcurrentHashMap<TableName, HTableDescriptor> tableMap)
    {
-      // randomly select table from tableMap
-      if (tableMap.isEmpty()){
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (tableMap){
+        // randomly select table from tableMap
+        if (tableMap.isEmpty()) {
+          return null;
+        }
         ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);



[1/5] hbase git commit: HBASE-16190 IntegrationTestDDLMasterFailover failed with IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)

2016-07-07 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 74090faaf -> ed07a38d5
  refs/heads/branch-1.1 dcc1243f6 -> d0756e306
  refs/heads/branch-1.2 8f8736d17 -> 075f33a06
  refs/heads/branch-1.3 1ac755e40 -> ee63706ee
  refs/heads/master e9f5db721 -> 17edca634


HBASE-16190 IntegrationTestDDLMasterFailover failed with 
IllegalArgumentException: n must be positive (Romil Choksi and Heng Chen)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/17edca63
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/17edca63
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/17edca63

Branch: refs/heads/master
Commit: 17edca6346722072a72ec7942e719214083e63a2
Parents: e9f5db7
Author: Enis Soztutar 
Authored: Thu Jul 7 16:34:08 2016 -0700
Committer: Enis Soztutar 
Committed: Thu Jul 7 16:34:08 2016 -0700

--
 .../hbase/IntegrationTestDDLMasterFailover.java | 24 
 1 file changed, 14 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/17edca63/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
index 4b75ce2..92c65d8 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDDLMasterFailover.java
@@ -25,13 +25,17 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.HBaseFsck;
@@ -246,12 +250,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // NamespaceAction has implemented selectNamespace() shared by multiple namespace Actions
     protected NamespaceDescriptor selectNamespace(
         ConcurrentHashMap<String, NamespaceDescriptor> namespaceMap) {
-      // randomly select namespace from namespaceMap
-      if (namespaceMap.isEmpty()) {
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (namespaceMap) {
+        // randomly select namespace from namespaceMap
+        if (namespaceMap.isEmpty()) {
+          return null;
+        }
         ArrayList<String> namespaceList = new ArrayList<String>(namespaceMap.keySet());
         String randomKey = namespaceList.get(RandomUtils.nextInt(namespaceList.size()));
         NamespaceDescriptor randomNsd = namespaceMap.get(randomKey);
@@ -386,12 +390,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     // TableAction has implemented selectTable() shared by multiple table Actions
     protected HTableDescriptor selectTable(ConcurrentHashMap<TableName, HTableDescriptor> tableMap)
    {
-      // randomly select table from tableMap
-      if (tableMap.isEmpty()){
-        return null;
-      }
       // synchronization to prevent removal from multiple threads
       synchronized (tableMap){
+        // randomly select table from tableMap
+        if (tableMap.isEmpty()) {
+          return null;
+        }
         ArrayList<TableName> tableList = new ArrayList<TableName>(tableMap.keySet());
         TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
         HTableDescriptor randomHtd = tableMap.get(randomKey);
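
All five branches receive the same two relocations (selectNamespace and selectTable). A tiny standalone reproduction of the reported failure mode, assuming commons-lang 2.x on the classpath as imported by the test: once another thread drains the map, the list size passed to nextInt is 0, and a non-positive bound is rejected with an IllegalArgumentException (the exact message text depends on the commons-lang/JDK version in use).

import org.apache.commons.lang.math.RandomUtils;

public class NMustBePositiveRepro {
  public static void main(String[] args) {
    int size = 0; // what tableList.size() evaluates to after a concurrent removal of the last entry
    try {
      RandomUtils.nextInt(size);
    } catch (IllegalArgumentException e) {
      System.out.println(e); // non-positive bounds are rejected
    }
  }
}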



hbase git commit: HBASE-15988 User needs to initiate full backup for new table(s) being added for incremental backup

2016-07-07 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 14170122c -> 6efa58128


HBASE-15988 User needs to initiate full backup for new table(s) being added for 
incremental backup


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6efa5812
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6efa5812
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6efa5812

Branch: refs/heads/HBASE-7912
Commit: 6efa58128f876b874164c198b4f60aefde73bf99
Parents: 1417012
Author: tedyu 
Authored: Thu Jul 7 16:22:31 2016 -0700
Committer: tedyu 
Committed: Thu Jul 7 16:22:31 2016 -0700

--
 .../main/java/org/apache/hadoop/hbase/master/HMaster.java | 10 --
 1 file changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6efa5812/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 0a0fdf4..bbd7107 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -169,14 +169,13 @@ import org.apache.hadoop.hbase.zookeeper.SplitOrMergeTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.zookeeper.KeeperException;
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.nio.SelectChannelConnector;
 import org.mortbay.jetty.servlet.Context;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.protobuf.Descriptors;
@@ -2632,6 +2631,13 @@ public class HMaster extends HRegionServer implements MasterServices {
         throw new DoNotRetryIOException("No table covered by incremental backup.");
       }
 
+      tableList.removeAll(incrTableSet);
+      if (!tableList.isEmpty()) {
+        String extraTables = StringUtils.join(",", tableList);
+        LOG.error("Some tables (" + extraTables + ") haven't gone through full backup");
+        throw new DoNotRetryIOException("Perform full backup on " + extraTables + " first, "
+            + "then retry the command");
+      }
       LOG.info("Incremental backup for the following table set: " + incrTableSet);
       tableList = Lists.newArrayList(incrTableSet);
     }
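
For reference, a small illustration (not part of the patch; the table names are made up) of the helper the new guard relies on: org.apache.hadoop.util.StringUtils.join turns the tables that still lack a full backup into the comma-separated list embedded in the DoNotRetryIOException message.

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.util.StringUtils;

public class JoinExample {
  public static void main(String[] args) {
    List<String> missingFullBackup = Arrays.asList("ns1:usertable", "ns1:events");
    // Prints "ns1:usertable,ns1:events" -- the list the user must fully back up first.
    System.out.println(StringUtils.join(",", missingFullBackup));
  }
}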



hbase git commit: add Francis Liu to dev list

2016-07-07 Thread toffer
Repository: hbase
Updated Branches:
  refs/heads/master c137bafe5 -> e9f5db721


add Francis Liu to dev list


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e9f5db72
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e9f5db72
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e9f5db72

Branch: refs/heads/master
Commit: e9f5db7213d045e002c44e418b33550148de8f22
Parents: c137baf
Author: Francis Liu 
Authored: Thu Jul 7 14:15:47 2016 -0700
Committer: Francis Liu 
Committed: Thu Jul 7 14:15:47 2016 -0700

--
 pom.xml | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e9f5db72/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 6077a5b..ea7743f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -526,6 +526,14 @@
       <organizationUrl>http://www.cloudera.com</organizationUrl>
     </developer>
     <developer>
+      <id>toffer</id>
+      <name>Francis Liu</name>
+      <email>tof...@apache.org</email>
+      <timezone>-8</timezone>
+      <organization>Yahoo</organization>
+      <organizationUrl>http://www.yahoo.com</organizationUrl>
+    </developer>
+    <developer>
       <id>virag</id>
       <name>Virag Kothari</name>
       <email>vi...@yahoo-inc.com</email>



hbase git commit: HBASE-16174 Hook cell test up, and fix broken cell test.

2016-07-07 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 ad276ef32 -> 114817253


HBASE-16174 Hook cell test up, and fix broken cell test.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/11481725
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/11481725
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/11481725

Branch: refs/heads/HBASE-14850
Commit: 114817253178f4faef00db93a187e7076fac658d
Parents: ad276ef
Author: Elliott Clark 
Authored: Tue Jul 5 11:13:01 2016 -0700
Committer: Elliott Clark 
Committed: Wed Jul 6 12:11:09 2016 -0700

--
 hbase-native-client/core/BUCK | 8 
 hbase-native-client/core/cell-test.cc | 2 +-
 2 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/11481725/hbase-native-client/core/BUCK
--
diff --git a/hbase-native-client/core/BUCK b/hbase-native-client/core/BUCK
index 1c926e3..c615426 100644
--- a/hbase-native-client/core/BUCK
+++ b/hbase-native-client/core/BUCK
@@ -20,6 +20,7 @@ cxx_library(
 name="core",
 exported_headers=[
 "client.h",
+"cell.h",
 "hbase_macros.h",
 "region-location.h",
 "location-cache.h",
@@ -28,6 +29,7 @@ cxx_library(
 "meta-utils.h",
 ],
 srcs=[
+"cell.cc",
 "client.cc",
 "location-cache.cc",
 "meta-utils.cc",
@@ -51,6 +53,12 @@ cxx_test(name="location-cache-test",
  deps=[":core",
"//test-util:test-util", ],
  run_test_separately=True, )
+cxx_test(name="cell-test",
+ srcs=[
+ "cell-test.cc",
+ ],
+ deps=[":core", ],
+ run_test_separately=True, )
 cxx_binary(name="simple-client",
srcs=["simple-client.cc", ],
deps=[":core", "//connection:connection"], )

http://git-wip-us.apache.org/repos/asf/hbase/blob/11481725/hbase-native-client/core/cell-test.cc
--
diff --git a/hbase-native-client/core/cell-test.cc b/hbase-native-client/core/cell-test.cc
index cbe50eb..49f567b 100644
--- a/hbase-native-client/core/cell-test.cc
+++ b/hbase-native-client/core/cell-test.cc
@@ -100,7 +100,7 @@ TEST(CellTest, MultipleCellsTest) {
 
 TEST(CellTest, CellRowTest) {
   std::string row = "only-row";
-  std::string family = "";
+  std::string family = "D";
   std::string column = "";
   std::string value = "";
   long timestamp = std::numeric_limits<long>::max();



[46/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/client/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/Durability.html
index 0f065d5..af1f718 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Durability.html
@@ -280,7 +280,7 @@ the order they are declared.
 
 
 values
-public static Durability[] values()
+public static Durability[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -297,7 +297,7 @@ for (Durability c : Durability.values())
 
 
 valueOf
-public static Durability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Durability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
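
The regenerated javadoc above covers the compiler-provided values() and valueOf(String) members of the Durability enum. A brief usage sketch (the row key is a placeholder) of the pattern it describes, together with the setDurability() call sites listed further down in class-use/Durability.html:

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class DurabilityExample {
  public static void main(String[] args) {
    // Iterate the constants in declaration order, as the javadoc suggests.
    for (Durability d : Durability.values()) {
      System.out.println(d);
    }
    // valueOf() requires the exact constant name.
    Durability skipWal = Durability.valueOf("SKIP_WAL");
    Put put = new Put(Bytes.toBytes("row-1"));
    put.setDurability(skipWal); // per-mutation durability override
  }
}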

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html 
b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
index 7cc0a93..7dc6ab3 100644
--- a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
+++ b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
@@ -243,7 +243,7 @@ the order they are declared.
 
 
 values
-public static IsolationLevel[] values()
+public static IsolationLevel[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -260,7 +260,7 @@ for (IsolationLevel c : IsolationLevel.values())
 
 
 valueOf
-public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html 
b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
index bcea5a9..81ef17a 100644
--- a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
@@ -233,7 +233,7 @@ the order they are declared.
 
 
 values
-public static SnapshotType[] values()
+public static SnapshotType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -250,7 +250,7 @@ for (SnapshotType c : SnapshotType.values())
 
 
 valueOf
-public static SnapshotType valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static SnapshotType valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index 6ff7a5c..a225a9c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -191,27 +191,27 @@ the order they are declared.
 
 
 
+Put
+Put.setDurability(Durability d) 
+
+
 Append
 Append.setDurability(Durability d) 
 
-
+
 Increment
 Increment.setDurability(Durability d) 
 
-
-Put
-Put.setDurability(Durability d) 
-
 
-Delete
-Delete.setDurab

[27/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index 3f9d94d..d9dd294 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -166,13 +166,13 @@
 
 
 
-protected Server
-ZkCoordinatedStateManager.server 
-
-
 private Server
 ZKSplitLogManagerCoordination.server 
 
+
+protected Server
+ZkCoordinatedStateManager.server 
+
 
 
 
@@ -184,11 +184,11 @@
 
 
 Server
-ZkCoordinatedStateManager.getServer() 
+BaseCoordinatedStateManager.getServer() 
 
 
 Server
-BaseCoordinatedStateManager.getServer() 
+ZkCoordinatedStateManager.getServer() 
 
 
 
@@ -201,11 +201,11 @@
 
 
 void
-ZkCoordinatedStateManager.initialize(Server server) 
+BaseCoordinatedStateManager.initialize(Server server) 
 
 
 void
-BaseCoordinatedStateManager.initialize(Server server) 
+ZkCoordinatedStateManager.initialize(Server server) 
 
 
 
@@ -333,15 +333,15 @@
 
 
 private Server
-SplitLogManager.server 
+CatalogJanitor.server 
 
 
 private Server
-CatalogJanitor.server 
+RegionStateStore.server 
 
 
 private Server
-RegionStateStore.server 
+SplitLogManager.server 
 
 
 
@@ -473,19 +473,19 @@
 
 
 private Server
-HeapMemoryManager.server 
+SplitTransactionImpl.server 
 
 
 private Server
-LogRoller.server 
+SplitTransactionImpl.DaughterOpener.server 
 
 
 private Server
-SplitTransactionImpl.server 
+LogRoller.server 
 
 
 private Server
-SplitTransactionImpl.DaughterOpener.server 
+HeapMemoryManager.server 
 
 
 
@@ -498,23 +498,23 @@
 
 
 Server
-RegionMergeTransactionImpl.getServer() 
+RegionMergeTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
-SplitTransaction.getServer()
-Get the Server running the transaction or rollback
-
+RegionMergeTransactionImpl.getServer() 
 
 
 Server
-RegionMergeTransaction.getServer()
-Get the Server running the transaction or rollback
-
+SplitTransactionImpl.getServer() 
 
 
 Server
-SplitTransactionImpl.getServer() 
+SplitTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 
@@ -550,20 +550,6 @@
 
 
 Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services) 
-
-
-PairOfSameType
-SplitTransaction.execute(Server server,
-  RegionServerServices services)
-Deprecated. 
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
-
-
-
-
-Region
 RegionMergeTransaction.execute(Server server,
   RegionServerServices services)
 Deprecated. 
@@ -572,22 +558,22 @@
 
 
 
-PairOfSameType
-SplitTransactionImpl.execute(Server server,
+Region
+RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
 
-Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services) 
 
 
 PairOfSameType
-SplitTransaction.execute(Server server,
-  RegionServerServices services,
-  User user)
-Run the transaction.
+SplitTransaction.execute(Server server,
+  RegionServerServices services)
+Deprecated. 
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+
 
 
 
@@ -599,11 +585,25 @@
 
 
 
+Region
+RegionMergeTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
+
+
 PairOfSameType
 SplitTransactionImpl.execute(Server server,
   RegionServerServices services,
   User user) 
 
+
+PairOfSameType
+SplitTransaction.execute(Server server,
+  RegionServerServices services,
+  User user)
+Run the transaction.
+
+
 
 void
 ReplicationService.initialize(Server rs,
@@ -639,20 +639,6 @@
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Server server,
-RegionServerServices services) 
-
-
-boolean
-SplitTransaction.rollback(Server server,
-RegionServerServices services)
-Deprecated. 
-use #rollback(Server, RegionServerServices, User); as of 
1.0.2, remove in 3.0
-
-
-
-
-boolean
 RegionMergeTransaction.rollback(Server server,
 RegionServerServices services)
 Deprecated. 
@@ -662,21 +648,21 @@
 
 
 boolean
-SplitTransactionImpl.rollback(Server server,
+RegionMergeTransactionImpl.rollback(Server server,
 RegionServerServices services) 
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Server server,
-RegionServerServices services,
-User user) 
+SplitTransactionImpl.rollback(Server server,
+RegionServerServices services

[21/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
index 7a9d607..24c6e4f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
@@ -131,15 +131,15 @@
 
 
 
+RetryingCallerInterceptorContext
+PreemptiveFastFailInterceptor.createEmptyContext() 
+
+
 abstract RetryingCallerInterceptorContext
 RetryingCallerInterceptor.createEmptyContext()
 This returns the context object for the current call.
 
 
-
-RetryingCallerInterceptorContext
-PreemptiveFastFailInterceptor.createEmptyContext() 
-
 
 RetryingCallerInterceptorContext
 NoOpRetryableCallerInterceptor.createEmptyContext() 
@@ -179,46 +179,46 @@
 
 
 
+void
+PreemptiveFastFailInterceptor.handleFailure(RetryingCallerInterceptorContext context,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t) 
+
+
 abstract void
 RetryingCallerInterceptor.handleFailure(RetryingCallerInterceptorContext context,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t)
 Call this function in case we caught a failure during 
retries.
 
 
-
-void
-PreemptiveFastFailInterceptor.handleFailure(RetryingCallerInterceptorContext context,
-  http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t) 
-
 
 void
 NoOpRetryableCallerInterceptor.handleFailure(RetryingCallerInterceptorContext context,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true";
 title="class or interface in java.lang">Throwable t) 
 
 
+void
+PreemptiveFastFailInterceptor.intercept(RetryingCallerInterceptorContext context) 
+
+
 abstract void
 RetryingCallerInterceptor.intercept(RetryingCallerInterceptorContext abstractRetryingCallerInterceptorContext)
 Call this function alongside the actual call done on the 
callable.
 
 
-
-void
-PreemptiveFastFailInterceptor.intercept(RetryingCallerInterceptorContext context) 
-
 
 void
 NoOpRetryableCallerInterceptor.intercept(RetryingCallerInterceptorContext abstractRetryingCallerInterceptorContext) 
 
 
+void
+PreemptiveFastFailInterceptor.updateFailureInfo(RetryingCallerInterceptorContext context) 
+
+
 abstract void
 RetryingCallerInterceptor.updateFailureInfo(RetryingCallerInterceptorContext context)
 Call this function to update at the end of the retry.
 
 
-
-void
-PreemptiveFastFailInterceptor.updateFailureInfo(RetryingCallerInterceptorContext context) 
-
 
 void
 NoOpRetryableCallerInterceptor.updateFailureInfo(RetryingCallerInterceptorContext context) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index 9dd8373..e235b30 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
@@ -251,11 +251,11 @@
 
 
 int
-RowMutations.compareTo(Row i) 
+RegionCoprocessorServiceExec.compareTo(Row o) 
 
 
 int
-RegionCoprocessorServiceExec.compareTo(Row o) 
+Get.compareTo(Row other) 
 
 
 int
@@ -263,11 +263,11 @@
 
 
 int
-Get.compareTo(Row other) 
+Increment.compareTo(Row i) 
 
 
 int
-Increment.compareTo(Row i) 
+RowMutations.compareTo(Row i) 
 
 
 private boolean
@@ -373,14 +373,14 @@
 
 
 void
-HTable.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+Table.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
 
 
 
 void
-Table.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+HTable.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or inte

[23/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index a5b5277..d04633e 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -708,16 +708,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 protected Connection
-AbstractRegionServerCallable.connection 
-
-
-protected Connection
 ConnectionCallable.connection 
 
-
+
 (package private) Connection
 ConnectionImplementation.MasterServiceState.connection 
 
+
+protected Connection
+AbstractRegionServerCallable.connection 
+
 
 
 
@@ -964,11 +964,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private Connection
-HRegionPartitioner.connection 
+TableInputFormatBase.connection 
 
 
 private Connection
-TableInputFormatBase.connection 
+HRegionPartitioner.connection 
 
 
 
@@ -1002,18 +1002,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-HRegionPartitioner.connection 
-
-
-private Connection
 TableOutputFormat.TableRecordWriter.connection 
 
-
+
 private Connection
 TableInputFormatBase.connection
 The underlying Connection 
of the table.
 
 
+
+private Connection
+HRegionPartitioner.connection 
+
 
 (package private) Connection
 MultiTableOutputFormat.MultiTableRecordWriter.connection 
@@ -1126,11 +1126,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 private Connection
-CatalogJanitor.connection 
+RegionPlacementMaintainer.connection 
 
 
 private Connection
-RegionPlacementMaintainer.connection 
+CatalogJanitor.connection 
 
 
 private Connection
@@ -1916,10 +1916,11 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
   http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong successes) 
 
 
-Canary.RegionTask(Connection connection,
+Canary.RegionTask(Connection connection,
   HRegionInfo region,
   Canary.Sink sink,
-  Canary.RegionTask.TaskType taskType) 
+  Canary.RegionTask.TaskType taskType,
+  
boolean rawScanEnabled) 
 
 
 Canary.ZookeeperMonitor(Connection connection,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index 728b572..2f4c765 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -142,19 +142,19 @@ the order they are declared.
 
 
 
-Query
-Query.setConsistency(Consistency consistency)
-Sets the consistency level for this operation
-
-
-
 Scan
 Scan.setConsistency(Consistency consistency) 
 
-
+
 Get
 Get.setConsistency(Consistency consistency) 
 
+
+Query
+Query.setConsistency(Consistency consistency)
+Sets the consistency level for this operation
+
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index ccffcfa..205e022 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -327,7 +327,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -338,7 +338,7 @@ service.
 
 
 boolean
-Table.checkAndDelete(byte[] row,
+HTable.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -357,7 +357,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
@@ -369,7 +369,7 @@

[33/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
index b7b694c..8be1722 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
@@ -350,11 +350,15 @@
 
 
 void
-BaseRegionObserver.start(CoprocessorEnvironment e) 
+MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
+Stores a reference to the coprocessor environment provided 
by the
+ RegionCoprocessorHost 
from the region where this
+ coprocessor is loaded.
+
 
 
 void
-BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
+BaseRegionObserver.start(CoprocessorEnvironment e) 
 
 
 void
@@ -366,19 +370,15 @@
 
 
 void
-MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
-Stores a reference to the coprocessor environment provided 
by the
- RegionCoprocessorHost 
from the region where this
- coprocessor is loaded.
-
+BaseRegionServerObserver.start(CoprocessorEnvironment env) 
 
 
 void
-BaseMasterObserver.start(CoprocessorEnvironment ctx) 
+BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
-BaseRegionServerObserver.start(CoprocessorEnvironment env) 
+BaseMasterObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
@@ -394,11 +394,11 @@
 
 
 void
-BaseRegionObserver.stop(CoprocessorEnvironment e) 
+MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
+BaseRegionObserver.stop(CoprocessorEnvironment e) 
 
 
 void
@@ -406,15 +406,15 @@
 
 
 void
-MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
+BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
 
 
 void
-BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
+BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
-BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
+BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
@@ -444,11 +444,11 @@
 
 
 void
-ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
+BulkDeleteEndpoint.start(CoprocessorEnvironment env) 
 
 
 void
-BulkDeleteEndpoint.start(CoprocessorEnvironment env) 
+ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
 
 
 void
@@ -456,11 +456,11 @@
 
 
 void
-ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
+BulkDeleteEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-BulkDeleteEndpoint.stop(CoprocessorEnvironment env) 
+ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
 
 
 
@@ -589,19 +589,19 @@
 
 
 void
-AccessController.start(CoprocessorEnvironment env) 
+SecureBulkLoadEndpoint.start(CoprocessorEnvironment env) 
 
 
 void
-SecureBulkLoadEndpoint.start(CoprocessorEnvironment env) 
+AccessController.start(CoprocessorEnvironment env) 
 
 
 void
-AccessController.stop(CoprocessorEnvironment env) 
+SecureBulkLoadEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-SecureBulkLoadEndpoint.stop(CoprocessorEnvironment env) 
+AccessController.stop(CoprocessorEnvironment env) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index fd60be4..e4c8960 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -642,14 +642,14 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-SimpleRegionNormalizer.computePlanForTable(TableName table)
-Computes next most "urgent" normalization action on the 
table.
+RegionNormalizer.computePlanForTable(TableName table)
+Computes next optimal normalization plan.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-RegionNormalizer.computePlanForTable(TableName table)
-Computes next optimal normalization plan.
+SimpleRegionNormalizer.computePlanForTable(TableName table)
+Computes next most "urgent" normalization action on the 
table.
 
 
 



[39/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 6b1d108..6822681 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -369,13 +369,13 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
@@ -400,13 +400,13 @@
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
 
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, 
User)
 
 
 
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, 
User)
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
 
 
 
@@ -418,15 +418,15 @@
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
 
-org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use FilterBase.filterRowKey(Cell)
+ Instead use Filter.filterRowKey(Cell)
 
 
 
-org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use Filter.filterRowKey(Cell)
+ Instead use FilterBase.filterRowKey(Cell)
 
 
 
@@ -523,10 +523,10 @@
 
 
 
-org.apache.hadoop.hbase.http.InfoServer.getPort()
+org.apache.hadoop.hbase.http.HttpServer.getPort()
 
 
-org.apache.hadoop.hbase.http.HttpServer.getPort()
+org.apache.hadoop.hbase.http.InfoServer.getPort()
 
 
 org.apache.hadoop.hbase.CellUtil.getQualifierBufferShallowCopy(Cell)
@@ -582,13 +582,13 @@
 
 
 
-org.apache.hadoop.hbase.io.ImmutableBytesWritable.getSize()
-use ImmutableBytesWritable.getLength()
 instead
+org.apache.hadoop.hbase.util.Bytes.getSize()
+use Bytes.getLength()
 instead
 
 
 
-org.apache.hadoop.hbase.util.Bytes.getSize()
-use Bytes.getLength()
 instead
+org.apache.hadoop.hbase.io.ImmutableBytesWritable.getSize()
+use ImmutableBytesWritable.getLength()
 instead
 
 
 
@@ -707,16 +707,6 @@
 
 
 
-org.apache.hadoop.hbase.coprocessor.BaseMasterAndRegionObserver.postAddColumn(ObserverContext,
 TableName, HColumnDescriptor)
-
-
-org.apache.hadoop.hbase.coprocessor.BaseMasterObserver.postAddColumn(ObserverContext,
 TableName, HColumnDescriptor)
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
-
-
 org.apache.hadoop.hbase.coprocessor.MasterObserver.postAddColumn(ObserverContext,
 TableName, HColumnDescriptor)
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
@@ -724,13 +714,13 @@
 
 
 
-org.apache.hadoop.hbase.coprocessor.BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext,
 TableName, HColumnDescriptor)
+org.apache.hadoop.hbase.coprocessor.BaseMasterAndRegionObserver.postAddColumn(ObserverContext,
 TableName, HColumnDescriptor)
 
 
-org.apache.hadoop.hbase.coprocessor.BaseMasterObserver.postAddColumnHandler(ObserverContext,
 TableName, HColumnDescriptor)
+org.apache.hadoop.hbase.coprocessor.BaseMasterObserver.postAddColumn(ObserverContext,
 TableName, HColumnDescriptor)
 As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+ (https:

[35/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index fa9b375..82361c0 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1259,32 +1259,32 @@ service.
 
 
 
-static boolean
-CellUtil.matchingRowColumn(Cell left,
+boolean
+KeyValue.KVComparator.matchingRowColumn(Cell left,
   Cell right)
+Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-boolean
-KeyValue.KVComparator.matchingRowColumn(Cell left,
+static boolean
+CellUtil.matchingRowColumn(Cell left,
   Cell right)
-Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-static boolean
-CellUtil.matchingRows(Cell left,
+boolean
+KeyValue.KVComparator.matchingRows(Cell left,
 Cell right)
+Deprecated. 
 Compares the row of two keyvalues for equality
 
 
 
-boolean
-KeyValue.KVComparator.matchingRows(Cell left,
+static boolean
+CellUtil.matchingRows(Cell left,
 Cell right)
-Deprecated. 
 Compares the row of two keyvalues for equality
 
 
@@ -1644,23 +1644,23 @@ service.
 
 
 
-Increment
-Increment.add(Cell cell)
-Add the specified KeyValue to this operation.
-
-
-
 Put
 Put.add(Cell kv)
 Add the specified KeyValue to this Put operation.
 
 
-
+
 Append
 Append.add(Cell cell)
 Add column and value to this Append operation.
 
 
+
+Increment
+Increment.add(Cell cell)
+Add the specified KeyValue to this operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1753,23 +1753,23 @@ service.
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
-
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Append
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 
 
@@ -1801,23 +1801,23 @@ service.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal
-BigDecimalColumnInterpreter.getValue(byte[] colFamily,
-byte[] colQualifier,
-Cell kv) 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
 DoubleColumnInterpreter.getValue(byte[] colFamily,
 byte[] colQualifier,
 Cell c) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 LongColumnInterpreter.getValue(byte[] colFamily,
 byte[] colQualifier,
 Cell kv) 
 
+
+http://

[32/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index cecaaab..1b79305 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -561,189 +561,201 @@ service.
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
+ MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
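
The hunk above shows the deprecated postAddColumn/postAddColumnHandler hooks pointing to their replacements, postAddColumnFamily and postCompletedAddColumnFamilyAction ("Called after the new column family has been created"). A minimal sketch of a coprocessor overriding the new hook; the ObserverContext generic parameter (MasterCoprocessorEnvironment) is assumed from the general 2.0-era API rather than shown in this diff, and the class name is illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Logs every newly created column family. Register the class through
// hbase.coprocessor.master.classes in hbase-site.xml.
public class ColumnFamilyAuditObserver extends BaseMasterObserver {
  @Override
  public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
      TableName tableName, HColumnDescriptor columnFamily) throws IOException {
    System.out.println("Added family " + columnFamily.getNameAsString()
        + " to table " + tableName.getNameAsString());
  }
}
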

[45/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 9377b97..09c8458 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -154,10 +154,10 @@
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 42ede45..e9abd1b 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -167,23 +167,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
   
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -321,16 +321,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapreduce.RecordReader
-TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
 
org.apache.hadoop.mapreduce.TaskAttemptContext context)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
 org.apache.hadoop.mapreduce.RecordReader
-MultiTableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
+TableInputFormatBase.createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
 
org.apache.hadoop.mapreduce.TaskAttemptContext context)
-Builds a TableRecordReader.
+Builds a TableRecordReader.
 
 
 
@@ -367,18 +367,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMapper.map(ImmutableBytesWritable key,
+GroupingTableMapper.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapreduce.Mapper.Context context)
-Pass the key, value to reduce.
+Extract the grouping columns from value to construct a new 
key.
 
 
 
 void
-GroupingTableMapper.map(ImmutableBytesWritable key,
+IdentityTableMapper.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapreduce.Mapper.Context context)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to redu
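
The map(ImmutableBytesWritable, Result, Context) rows reordered above are the standard TableMapper hook that GroupingTableMapper and IdentityTableMapper implement. A minimal sketch of a custom mapper with the same shape (class and output types are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

// Emits one count per row key scanned by TableInputFormat.
public class RowKeyCountMapper extends TableMapper<Text, IntWritable> {
  private static final IntWritable ONE = new IntWritable(1);

  @Override
  protected void map(ImmutableBytesWritable key, Result value, Context context)
      throws IOException, InterruptedException {
    // 'key' is the row key; 'value' holds the cells returned by the scan.
    context.write(new Text(Bytes.toString(key.get())), ONE);
  }
}
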

[26/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 4792ceb..bd0dec9 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -248,11 +248,11 @@
 
 
 ServerName
-SplitLogTask.getServerName() 
+Server.getServerName() 
 
 
 ServerName
-Server.getServerName() 
+SplitLogTask.getServerName() 
 
 
 static ServerName
@@ -591,11 +591,11 @@
 
 
 private ServerName
-AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
+FastFailInterceptorContext.server 
 
 
 private ServerName
-FastFailInterceptorContext.server 
+AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable.server 
 
 
 private ServerName
@@ -911,15 +911,9 @@
 
 
 boolean
-ClusterStatusListener.isDeadServer(ServerName sn)
-Check if we know if a server is dead.
-
-
-
-boolean
 ConnectionImplementation.isDeadServer(ServerName sn) 
 
-
+
 boolean
 ClusterConnection.isDeadServer(ServerName serverName)
 Deprecated. 
@@ -927,6 +921,12 @@
 
 
 
+
+boolean
+ClusterStatusListener.isDeadServer(ServerName sn)
+Check if we know if a server is dead.
+
+
 
 protected boolean
 PreemptiveFastFailInterceptor.isServerInFailureMap(ServerName serverName) 
@@ -1006,15 +1006,15 @@
 HBaseAdmin.rollWALWriterImpl(ServerName sn) 
 
 
+(package private) void
+RegionServerCallable.setClientByServiceName(ServerName service) 
+
+
 (package private) abstract void
 AbstractRegionServerCallable.setClientByServiceName(ServerName serviceName)
 Set the Rpc client for Client services
 
 
-
-(package private) void
-RegionServerCallable.setClientByServiceName(ServerName service) 
-
 
 private void
 AsyncProcess.AsyncRequestFutureImpl.setError(int index,
@@ -1242,13 +1242,13 @@
 
 
 long
-ExponentialClientBackoffPolicy.getBackoffTime(ServerName serverName,
+ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerName serverName,
 byte[] region,
 ServerStatistics stats) 
 
 
 long
-ClientBackoffPolicyFactory.NoBackoffPolicy.getBackoffTime(ServerName serverName,
+ExponentialClientBackoffPolicy.getBackoffTime(ServerName serverName,
 byte[] region,
 ServerStatistics stats) 
 
@@ -1379,49 +1379,49 @@
 
 
 void
-BaseMasterAndRegionObserver.postMove(ObserverContext ctx,
+MasterObserver.postMove(ObserverContext ctx,
 HRegionInfo region,
 ServerName srcServer,
-ServerName destServer) 
+ServerName destServer)
+Called after the region move has been requested.
+
 
 
 void
-BaseMasterObserver.postMove(ObserverContext ctx,
+BaseMasterAndRegionObserver.postMove(ObserverContext ctx,
 HRegionInfo region,
 ServerName srcServer,
 ServerName destServer) 
 
 
 void
-MasterObserver.postMove(ObserverContext ctx,
+BaseMasterObserver.postMove(ObserverContext ctx,
 HRegionInfo region,
 ServerName srcServer,
-ServerName destServer)
-Called after the region move has been requested.
-
+ServerName destServer) 
 
 
 void
-BaseMasterAndRegionObserver.preMove(ObserverContext ctx,
+MasterObserver.preMove(ObserverContext ctx,
   HRegionInfo region,
   ServerName srcServer,
-  ServerName destServer) 
+  ServerName destServer)
+Called prior to moving a given region from one region 
server to another.
+
 
 
 void
-BaseMasterObserver.preMove(ObserverContext ctx,
+BaseMasterAndRegionObserver.preMove(ObserverContext ctx,
   HRegionInfo region,
   ServerName srcServer,
   ServerName destServer) 
 
 
 void
-MasterObserver.preMove(ObserverContext ctx,
+BaseMasterObserver.preMove(ObserverContext ctx,
   HRegionInfo region,
   ServerName srcServer,
-  ServerName destServer)
-Called prior to moving a given region from one region 
server to another.
-
+  ServerName destServer) 
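
The preMove/postMove rows above carry the hook descriptions ("Called prior to moving a given region from one region server to another" / "Called after the region move has been requested"). A minimal sketch of an observer using them, again assuming the usual ObserverContext generic parameter; names are illustrative:

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Logs region moves requested through the master.
public class RegionMoveLogger extends BaseMasterObserver {
  private static final Log LOG = LogFactory.getLog(RegionMoveLogger.class);

  @Override
  public void preMove(ObserverContext<MasterCoprocessorEnvironment> ctx,
      HRegionInfo region, ServerName srcServer, ServerName destServer) throws IOException {
    LOG.info("Moving " + region.getRegionNameAsString()
        + " from " + srcServer + " to " + destServer);
  }

  @Override
  public void postMove(ObserverContext<MasterCoprocessorEnvironment> ctx,
      HRegionInfo region, ServerName srcServer, ServerName destServer) throws IOException {
    LOG.info("Move requested for " + region.getRegionNameAsString());
  }
}
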
 
 
 
@@ -2441,11 +2441,9 @@
 
 
 
-(package private) void
-AssignmentManager.regionOnline(HRegionInfo regionInfo,
-ServerName sn)
-Marks the region as online.
-
+void
+RegionStates.regionOnline(HRegionInfo hri,
+ServerName serverName) 
 
 
 void
@@ -245

[52/52] hbase-site git commit: Empty commit

2016-07-07 Thread misty
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/e29c39f2
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/e29c39f2
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/e29c39f2

Branch: refs/heads/asf-site
Commit: e29c39f2401e7a7bbf702d9dc2dff304e11dfed5
Parents: 16d2a5b
Author: Misty Stanley-Jones 
Authored: Thu Jul 7 10:57:38 2016 -0700
Committer: Misty Stanley-Jones 
Committed: Thu Jul 7 10:57:38 2016 -0700

--

--




[43/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 1325409..09aebe0 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -844,23 +844,23 @@
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.SnapshotType
 org.apache.hadoop.hbase.client.MasterSwitchType
 org.apache.hadoop.hbase.client.CompactionState
-org.apache.hadoop.hbase.client.SnapshotType
 org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.security.SecurityCapability
+org.apache.hadoop.hbase.quotas.ThrottlingException.Type
+org.apache.hadoop.hbase.quotas.ThrottleType
+org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.regionserver.BloomType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index 75be6b4..5a2923e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -1252,26 +1252,28 @@
 1244
 1245  public static final String 
HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY =
 1246  
"hbase.canary.write.table.check.period";
-1247
-1248  /**
-1249   * Configuration keys for programmatic 
JAAS configuration for secured ZK interaction
-1250   */
-1251  public static final String 
ZK_CLIENT_KEYTAB_FILE = "hbase.zookeeper.client.keytab.file";
-1252  public static final String 
ZK_CLIENT_KERBEROS_PRINCIPAL =
-1253  
"hbase.zookeeper.client.kerberos.principal";
-1254  public static final String 
ZK_SERVER_KEYTAB_FILE = "hbase.zookeeper.server.keytab.file";
-1255  public static final String 
ZK_SERVER_KERBEROS_PRINCIPAL =
-1256  
"hbase.zookeeper.server.kerberos.principal";
-1257
-1258  /** Config key for hbase temporary 
directory in hdfs */
-1259  public static final String 
TEMPORARY_FS_DIRECTORY_KEY = "hbase.fs.tmp.dir";
-1260  public static final String 
DEFAULT_TEMPORARY_HDFS_DIRECTORY = "/user/"
-1261  + System.getProperty("user.name") 
+ "/hbase-staging";
-1262
-1263  private HConstants() {
-1264// Can't be instantiated with this 
ctor.
-1265  }
-1266}
+1247  
+1248  public static final String 
HBASE_CANARY_READ_RAW_SCAN_KEY = "hbase.canary.read.raw.enabled";
+1249
+1250  /**
+1251   * Configuration keys for programmatic 
JAAS configuration for secured ZK interaction
+1252   */
+1253  public static final String 
ZK_CLIENT_KEYTAB_FILE = "hbase.zookeeper.client.keytab.file";
+1254  public static final String 
ZK_CLIENT_KERBEROS_PRINCIPAL =
+1255  
"hbase.zookeeper.client.kerberos.principal";
+1256  public static final String 
ZK_SERVER_KEYTAB_FILE = "hbase.zookeeper.server.keytab.file";
+1257  public static final String 
ZK_SERVER_KERBEROS_PRINCIPAL =
+1258  
"hbase.zookeeper.server.kerberos.principal";
+1259
+1260  /** Config key for hbase temporary 
directory in hdfs */
+1261  public static final String 
TEMPORARY_FS_DIRECTORY_KEY = "hbase.fs.tmp.dir";
+1262  public static final String 
DEFAULT_TEMPORARY_HDFS_DIRECTORY = "/user/"
+1263  + System.getProperty("user.name") 
+ "/hbase-staging";
+1264
+1265  private HConstants() {
+1266// Can't be instantiated with this 
ctor.
+1267  }
+1268}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/book.html
--
diff --git a/book.html b/book.html
index 28dbf41..e3bf5ee 100644
--- a/book.html
+++ b/book.html
@@ -33766,7 +33766,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 2.0.0-SNAPSHOT
-Last updated 2016-06-15 16:11:32 UTC
+Last updated 2016-04-08 14:30:12 UTC
 
 
 

http://git-wip-us.apache.org/repos/

[48/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index e2f5f0a..8699e9c 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -710,122 +710,126 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+HBASE_CANARY_READ_RAW_SCAN_KEY 
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 Canary config keys
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_LOWERLIMIT_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_UPPERLIMIT_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_VALUE_SIZE_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CHECKSUM_VERIFICATION
 If this parameter is set to true, then hbase will read
  data and then verify checksums.
 
 
-
+
 static boolean
 HBASE_CLIENT_ENABLE_FAST_FAIL_MODE_DEFAULT 
 
-
+
 static long
 HBASE_CLIENT_FAST_FAIL_CLEANUP_DURATION_MS_DEFAULT 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_CLEANUP_MS_DURATION_MS 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_MODE_ENABLED
 Config for enabling/disabling the fast fail mode.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS 
 
-
+
 static long
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS_DEFAULT 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_INSTANCE_ID
 Parameter name for unique identifier for this 
Configuration
  instance.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_SIZE
 Parameter name for HBase client IPC pool size
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_TYPE
 Parameter name for HBase client IPC pool type
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERREGION_TASKS
 The maximum number of concurrent connections the client 
will maintain to a single
  Region.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERSERVER_TASKS
 The maximum number of concurrent connections the client 
will maintain to a single
  RegionServer.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_TOTAL_TASKS
 The maximum number of concurrent connections the client 
will maintain.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_META_OPERATION_TIMEOUT
 Parameter name for HBase client operation timeout.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_OPERATION_TIMEOUT
 Parameter name for HBase client operation timeout.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or 

[30/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 93ea532..7a2b4de 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -266,11 +266,11 @@ service.
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.location 
+RegionAdminServiceCallable.location 
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location 
+AbstractRegionServerCallable.location 
 
 
 
@@ -298,11 +298,11 @@ service.
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.getLocation() 
+MultiServerCallable.getLocation() 
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation() 
+AbstractRegionServerCallable.getLocation() 
 
 
 HRegionLocation
@@ -310,26 +310,26 @@ service.
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
@@ -399,13 +399,13 @@ service.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-RegionLocator.getAllRegionLocations()
-Retrieves all of the regions associated with this 
table.
-
+HRegionLocator.getAllRegionLocations() 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HRegionLocator.getAllRegionLocations() 
+RegionLocator.getAllRegionLocations()
+Retrieves all of the regions associated with this 
table.
+
 
 
 private PairList,http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List>
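
RegionLocator.getRegionLocation(byte[]) ("Finds the region on which the given row is being served") and getAllRegionLocations(), both reordered above, form the public interface behind HRegionLocator. A minimal usage sketch (the table and row names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionLocationLookup {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         RegionLocator locator = conn.getRegionLocator(TableName.valueOf("my_table"))) {
      // Finds the region on which the given row is being served.
      HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes("row-0001"));
      System.out.println("Row is served by " + loc.getServerName());
      // Retrieves all of the regions associated with this table.
      System.out.println(locator.getAllRegionLocations().size() + " region(s) in total");
    }
  }
}
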



[41/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/coc.html
--
diff --git a/coc.html b/coc.html
index f1487a3..eae42e3 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 113e850..079fab6 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index a0a6785..988d4ba 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index d537710..f3ead1b 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Reactor Dependency Convergence
 
@@ -1743,7 +1743,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index f88c311..ffbead9 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index e30ce3f..9c45236 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Project Dependency Management
 
@@ -816,7 +816,7 @@
 http://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-07-05
+  Last Published: 
2016-07-07
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index 45310b4..fe152c0 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -1763,7 +1763,6 @@
 ReplicationTracker
 ReplicationTrackerZKImpl
 ReplicationWALReaderManager
-ReplicationZKLockCleanerChore
 ResizableBlockCache
 ResourceBase
 ResourceConfig

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/allclasses-noframe.html
--
diff --git

[38/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 1504f1d..db1d0dc 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -7099,7 +7099,7 @@
 For each column family of the region tries to get one row 
and outputs the latency, or the
  failure.
 
-Canary.RegionTask(Connection,
 HRegionInfo, Canary.Sink, Canary.RegionTask.TaskType) - Constructor 
for class org.apache.hadoop.hbase.tool.Canary.RegionTask
+Canary.RegionTask(Connection,
 HRegionInfo, Canary.Sink, Canary.RegionTask.TaskType, boolean) - 
Constructor for class org.apache.hadoop.hbase.tool.Canary.RegionTask
  
 Canary.RegionTask.TaskType - Enum in org.apache.hadoop.hbase.tool
  
@@ -8397,8 +8397,6 @@
  
 checkLength(int,
 int) - Static method in class 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.CompressedKvDecoder
  
-checkLockExists(String)
 - Method in class org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl
- 
 checkLogRoll()
 - Method in class org.apache.hadoop.hbase.regionserver.wal.FSHLog
 
 Schedule a log roll if needed.
@@ -8819,8 +8817,6 @@
  
 chore()
 - Method in class org.apache.hadoop.hbase.master.cleaner.CleanerChore
  
-chore()
 - Method in class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore
- 
 chore()
 - Method in class org.apache.hadoop.hbase.master.ClusterStatusPublisher
  
 chore()
 - Method in class org.apache.hadoop.hbase.master.ExpiredMobFileCleanerChore
@@ -18661,8 +18657,6 @@
 
 Default time to live of cell contents.
 
-DEFAULT_TTL
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore
- 
 DEFAULT_TTL
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner
  
 DEFAULT_TYPE
 - Static variable in class org.apache.hadoop.hbase.codec.prefixtree.decode.PrefixTreeCell
@@ -18847,8 +18841,6 @@
 
 Default thread pool for the procedure
 
-defaultReplicationQueueClass
 - Static variable in class org.apache.hadoop.hbase.replication.ReplicationFactory
- 
 defaultReplicaToOtherReplicas
 - Variable in class org.apache.hadoop.hbase.master.RegionStates
 
 Maintains the mapping from the default region to the 
replica regions.
@@ -32451,8 +32443,6 @@
 Determine based on a list of children under a ZNode, 
whether or not a
  process which created a specified ZNode has obtained a lock.
 
-getLockZNode(String)
 - Method in class org.apache.hadoop.hbase.replication.ReplicationQueuesZKImpl
- 
 getLog(HRegionInfo)
 - Method in class org.apache.hadoop.hbase.util.MetaUtils
  
 getLogDir()
 - Method in class org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore
@@ -42873,6 +42863,8 @@
 
 HBASE_APP_DIR
 - Static variable in class org.apache.hadoop.hbase.http.InfoServer
  
+HBASE_CANARY_READ_RAW_SCAN_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
+ 
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
 
 Canary config keys
@@ -53446,8 +53438,6 @@
  
 LOG
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.LogCleaner
  
-LOG
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore
- 
 LOG
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner
  
 LOG
 - Static variable in class org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner
@@ -70397,8 +70387,6 @@ service.
  
 queues
 - Variable in class org.apache.hadoop.hbase.ipc.RWQueueRpcExecutor
  
-queues
 - Variable in class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore
- 
 queues
 - Variable in class org.apache.hadoop.hbase.replication.regionserver.ReplicationSource
  
 queuesClient
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationPeersZKImpl
@@ -70850,6 +70838,10 @@ service.
  
 rawRequest
 - Variable in class org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter
  
+rawScanEnabled
 - Variable in class org.apache.hadoop.hbase.tool.Canary.RegionMonitor
+ 
+rawScanEnabled
 - Variable in class org.apache.hadoop.hbase.tool.Canary.RegionTask
+ 
 RawShort - Class 
in org.apache.hadoop.hbase.types
 
 An DataType for interacting with values 
encoded using
@@ -75461,14 +75453,6 @@ service.
 Creates the helper but doesn't open any file
  Use setInitialPosition after using the constructor if some content needs to 
be skipped
 
-ReplicationZKLockCleanerChore - Class in org.apache.hadoop.hbase.master.cleaner
-
-A cleaner that cleans replication locks on zk which is 
locked by dead region servers
-
-ReplicationZKLockCleanerChore(Stoppable,
 Abortable, int, ZooKeeperWatcher, Configuration) - Constructor for 
class org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore
- 
-replicationZKLockCleanerChore
 - Variable in class org.apache.hadoop.hbase.master.HMaster
- 
 rep

[49/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 063fc4a..ac0ca75 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -6585,6 +6585,8 @@
  
 hasUsedHeapMB()
 - Method in class org.apache.hadoop.hbase.ServerLoad
  
+HBASE_CANARY_READ_RAW_SCAN_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
+ 
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
 
 Canary config keys



[42/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 798c91c..807b442 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -280,10 +280,10 @@
  Warnings
  Errors
 
-1784
+1783
 0
 0
-11571
+11573
 
 Files
 
@@ -2511,7 +2511,7 @@
 org/apache/hadoop/hbase/master/HMaster.java
 0
 0
-49
+48
 
 org/apache/hadoop/hbase/master/HMasterCommandLine.java
 0
@@ -4206,7 +4206,7 @@
 org/apache/hadoop/hbase/replication/ReplicationQueuesZKImpl.java
 0
 0
-6
+5
 
 org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java
 0
@@ -5021,7 +5021,7 @@
 org/apache/hadoop/hbase/tool/Canary.java
 0
 0
-14
+18
 
 org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java
 0
@@ -5973,7 +5973,7 @@
 
 
 http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces";>NeedBraces
-1693
+1692
  Error
 
 coding
@@ -6035,7 +6035,7 @@
 ordered: "true"
 sortStaticImportsAlphabetically: "true"
 option: "top"
-841
+840
  Error
 
 
@@ -6070,7 +6070,7 @@
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription";>NonEmptyAtclauseDescription
-3222
+3224
  Error
 
 misc
@@ -6088,7 +6088,7 @@
 
 max: "100"
 ignorePattern: "^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated"
-342
+344
  Error
 
 
@@ -35049,317 +35049,311 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-629
+625
 
  Error
 sizes
 MethodLength
 Method length is 223 lines (max allowed is 150).
-661
+657
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-772
+768
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-779
+775
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-986
+982
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-995
+991
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1047
+1043
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1090
+1086
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1092
+1088
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1094
+1090
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1096
+1092
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1098
+1094
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1106
+1102
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1107
+1103
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-1110
+1106
 
  Error
 indentation
 Indentation
 'method def' child have incorrect indentation level 3, expected level 
should be 4.
-
+1107
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1168
+1153
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1169
+1154
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1170
+1155
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1171
+1156
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1172
+1157
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1173
+1158
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1174
+1159
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1175
+1160
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1176
+1161
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1177
+1273
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1289
+1285
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1301
+1314
 
  Error
-blocks
-NeedBraces
-'if' construct must use '{}'s.
-1330
-
- Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-1568
-
- Error
-blocks
-NeedBraces
-'if' construct must use '{}'s.
-1826
+1552
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1834
+1810
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1841
+1818
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1849
+1825
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-2273
+1833
 
  Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-2351
+2257
 
  Error
-javadoc
-NonEmptyAtclauseDescription
-At-clause should have a non-empty description.
-2472
-
- Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-2544
+2335
+
+ Error

[51/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/16d2a5b1
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/16d2a5b1
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/16d2a5b1

Branch: refs/heads/asf-site
Commit: 16d2a5b19706084379fba745323e2605acae87a2
Parents: b6dc573
Author: jenkins 
Authored: Thu Jul 7 14:54:22 2016 +
Committer: Misty Stanley-Jones 
Committed: Thu Jul 7 10:56:55 2016 -0700

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 apache_hbase_reference_guide.pdfmarks   |4 +-
 apidocs/constant-values.html|  351 +-
 apidocs/index-all.html  |2 +
 apidocs/org/apache/hadoop/hbase/HConstants.html |  418 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   |4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |  246 +-
 .../hadoop/hbase/class-use/ServerName.html  |4 +-
 .../hadoop/hbase/class-use/TableName.html   |4 +-
 .../hadoop/hbase/client/CompactionState.html|4 +-
 .../apache/hadoop/hbase/client/Durability.html  |4 +-
 .../hadoop/hbase/client/IsolationLevel.html |4 +-
 .../hadoop/hbase/client/SnapshotType.html   |4 +-
 .../hbase/client/class-use/Durability.html  |   18 +-
 .../hadoop/hbase/client/class-use/Mutation.html |8 +-
 .../hadoop/hbase/client/class-use/Result.html   |   30 +-
 .../hadoop/hbase/client/class-use/Row.html  |4 +-
 .../hadoop/hbase/client/class-use/Scan.html |   12 +-
 .../hadoop/hbase/client/package-tree.html   |6 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |4 +-
 .../filter/class-use/Filter.ReturnCode.html |   62 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |   42 +-
 .../hadoop/hbase/filter/package-tree.html   |4 +-
 .../io/class-use/ImmutableBytesWritable.html|   30 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|8 +-
 .../hbase/io/encoding/DataBlockEncoding.html|4 +-
 .../mapreduce/class-use/TableRecordReader.html  |4 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   |4 +-
 .../hbase/quotas/ThrottlingException.Type.html  |4 +-
 .../hadoop/hbase/quotas/package-tree.html   |4 +-
 .../hadoop/hbase/regionserver/BloomType.html|4 +-
 .../hadoop/hbase/util/class-use/Order.html  |   42 +-
 .../util/class-use/PositionedByteRange.html |  386 +-
 apidocs/overview-tree.html  |   20 +-
 .../org/apache/hadoop/hbase/HConstants.html |   42 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 9098 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/allclasses-frame.html|1 -
 devapidocs/allclasses-noframe.html  |1 -
 devapidocs/constant-values.html |  377 +-
 devapidocs/deprecated-list.html |  364 +-
 devapidocs/index-all.html   |   57 +-
 .../org/apache/hadoop/hbase/HConstants.html |  420 +-
 .../HealthChecker.HealthCheckerExitStatus.html  |4 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   |4 +-
 .../org/apache/hadoop/hbase/ScheduledChore.html |2 +-
 .../hadoop/hbase/class-use/Abortable.html   |   62 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |  878 +-
 .../hadoop/hbase/class-use/CellComparator.html  |  122 +-
 .../hadoop/hbase/class-use/CellScanner.html |   96 +-
 .../hadoop/hbase/class-use/ClusterStatus.html   |   20 +-
 .../hadoop/hbase/class-use/Coprocessor.html |   12 +-
 .../hbase/class-use/CoprocessorEnvironment.html |   44 +-
 .../hbase/class-use/HBaseIOException.html   |8 +-
 .../hbase/class-use/HColumnDescriptor.html  |  314 +-
 .../hadoop/hbase/class-use/HRegionInfo.html |  467 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |   24 +-
 .../hbase/class-use/HTableDescriptor.html   |  411 +-
 .../InterProcessLock.MetadataHandler.html   |8 +-
 .../apache/hadoop/hbase/class-use/KeyValue.html |   22 +-
 .../hbase/class-use/NamespaceDescriptor.html|  108 +-
 .../hadoop/hbase/class-use/ProcedureInfo.html   |   22 +-
 .../hadoop/hbase/class-use/RegionLocations.html |   10 +-
 .../hadoop/hbase/class-use/ScheduledChore.html  |   38 +-
 .../apache/hadoop/hbase/class-use/Server.html   |  138 +-
 .../hadoop/hbase/class-use/ServerName.htm

[44/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 99a410e..2f3ad05 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,105 +116,105 @@
 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+T
+DataType.decode(PositionedByteRange src)
+Read an instance of T from the buffer 
src.
+
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRange src) 
 
 
-byte[]
-OrderedBlob.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+RawLong.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+RawShort.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
+OrderedInt8.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRange src) 
+byte[]
+RawBytes.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+T
+TerminatedWrapper.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
-OrderedFloat64.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRange src) 
 
 
-byte[]
-RawBytes.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
-OrderedFloat32.decode(PositionedByteRange src) 
+byte[]
+OrderedBlobVar.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/ja
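
The decode(PositionedByteRange) rows above belong to the DataType hierarchy ("Read an instance of T from the buffer src"). A minimal round-trip sketch with OrderedInt64; it assumes the SimplePositionedMutableByteRange implementation of PositionedByteRange and the ASCENDING singleton, neither of which is shown in this diff:

import org.apache.hadoop.hbase.types.OrderedInt64;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class OrderedInt64RoundTrip {
  public static void main(String[] args) {
    OrderedInt64 type = OrderedInt64.ASCENDING;
    // Allocate exactly the encoded length, write, rewind, then read back.
    PositionedByteRange buf = new SimplePositionedMutableByteRange(type.encodedLength(42L));
    type.encode(buf, 42L);
    buf.setPosition(0);
    System.out.println(type.decode(buf)); // 42
  }
}
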

[50/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 2868354..5917519 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -1129,1210 +1129,1217 @@
 2147483647
 
 
+
+
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+HBASE_CANARY_READ_RAW_SCAN_KEY
+"hbase.canary.read.raw.enabled"
+
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 "hbase.canary.write.data.ttl"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_LOWERLIMIT_KEY
 "hbase.canary.write.perserver.regions.lowerLimit"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_UPPERLIMIT_KEY
 "hbase.canary.write.perserver.regions.upperLimit"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY
 "hbase.canary.write.table.check.period"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_VALUE_SIZE_KEY
 "hbase.canary.write.value.size"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CHECKSUM_VERIFICATION
 "hbase.regionserver.checksum.verify"
 
-
+
 
 
 public static final boolean
 HBASE_CLIENT_ENABLE_FAST_FAIL_MODE_DEFAULT
 false
 
-
+
 
 
 public static final long
 HBASE_CLIENT_FAST_FAIL_CLEANUP_DURATION_MS_DEFAULT
 60L
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_CLEANUP_MS_DURATION_MS
 "hbase.client.fast.fail.cleanup.duration"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL
 "hbase.client.fast.fail.interceptor.impl"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_MODE_ENABLED
 "hbase.client.fast.fail.mode.enabled"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS
 "hbase.client.fastfail.threshold"
 
-
+
 
 
 public static final long
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS_DEFAULT
 6L
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_INSTANCE_ID
 "hbase.client.instance.id"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_SIZE
 "hbase.client.ipc.pool.size"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_TYPE
 "hbase.client.ipc.pool.type"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERREGION_TASKS
 "hbase.client.max.perregion.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERSERVER_TASKS
 "hbase.client.max.perserver.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_TOTAL_TASKS
 "hbase.client.max.total.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_META_OPERATION_TIMEOUT
 "hbase.client.meta.operation.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="cla
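
Among the constants listed in this hunk are the client fast-fail keys, for example HBASE_CLIENT_FAST_FAIL_MODE_ENABLED = "hbase.client.fast.fail.mode.enabled" and HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS = "hbase.client.fastfail.threshold". A minimal sketch of enabling the mode on a client configuration (the threshold value chosen here is arbitrary):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class FastFailClientConfig {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Enable preemptive fast fail and lower the failure-detection threshold.
    conf.setBoolean(HConstants.HBASE_CLIENT_FAST_FAIL_MODE_ENABLED, true);
    conf.setLong(HConstants.HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS, 30000L);
    System.out.println(conf.get(HConstants.HBASE_CLIENT_FAST_FAIL_MODE_ENABLED));
  }
}
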

[47/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index a420a89..b9fd2a6 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 
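
The values()/valueOf() text above is the standard enum boilerplate for KeepDeletedCells. A minimal sketch that iterates the constants and applies one to a column family descriptor; HColumnDescriptor.setKeepDeletedCells is assumed from the general client API rather than from this diff:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;

public class KeepDeletedCellsSketch {
  public static void main(String[] args) {
    // Iterate the constants as the Javadoc suggests.
    for (KeepDeletedCells c : KeepDeletedCells.values()) {
      System.out.println(c);
    }
    // Parse from a string and apply to a column family.
    HColumnDescriptor cf = new HColumnDescriptor("cf");
    cf.setKeepDeletedCells(KeepDeletedCells.valueOf("TTL"));
    System.out.println(cf.getKeepDeletedCells());
  }
}
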

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 3e920b6..6b18f9a 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -919,23 +919,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Put
+Put.add(Cell kv)
+Add the specified KeyValue to this Put operation.
+
+
+
 Append
 Append.add(Cell cell)
 Add column and value to this Append operation.
 
 
-
+
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
-
-Put
-Put.add(Cell kv)
-Add the specified KeyValue to this Put operation.
-
-
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1013,27 +1013,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 boolean partial) 
 
 
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Append
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
-
+
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
-
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
+
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 
 
@@ -1050,20 +1050,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Cell
-TimestampsFilter.getNextCellHint(Cell currentCell)
-Pick the next cell that the scanner sh

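The reordered rows above index the client methods that accept a Cell, including Put.add(Cell). A small sketch, assuming the client API shown in this diff, that copies the cells of a scanned Result into a Put for the same row; the helper name is made up.

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;

public class CellCopySketch {
  /** Re-wraps every Cell of a scanned row into a Put for the same row key. */
  public static Put toPut(Result source) throws IOException {
    Put put = new Put(source.getRow());
    for (Cell cell : source.rawCells()) {
      // Put.add(Cell) requires the cell's row to match the Put's row key.
      put.add(cell);
    }
    return put;
  }
}
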
[40/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 66c2ee1..66dd983 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -1368,1210 +1368,1217 @@
 2147483647
 
 
+
+
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+HBASE_CANARY_READ_RAW_SCAN_KEY
+"hbase.canary.read.raw.enabled"
+
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 "hbase.canary.write.data.ttl"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_LOWERLIMIT_KEY
 "hbase.canary.write.perserver.regions.lowerLimit"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_UPPERLIMIT_KEY
 "hbase.canary.write.perserver.regions.upperLimit"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY
 "hbase.canary.write.table.check.period"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_VALUE_SIZE_KEY
 "hbase.canary.write.value.size"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CHECKSUM_VERIFICATION
 "hbase.regionserver.checksum.verify"
 
-
+
 
 
 public static final boolean
 HBASE_CLIENT_ENABLE_FAST_FAIL_MODE_DEFAULT
 false
 
-
+
 
 
 public static final long
 HBASE_CLIENT_FAST_FAIL_CLEANUP_DURATION_MS_DEFAULT
 60L
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_CLEANUP_MS_DURATION_MS
 "hbase.client.fast.fail.cleanup.duration"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL
 "hbase.client.fast.fail.interceptor.impl"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_MODE_ENABLED
 "hbase.client.fast.fail.mode.enabled"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS
 "hbase.client.fastfail.threshold"
 
-
+
 
 
 public static final long
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS_DEFAULT
 6L
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_INSTANCE_ID
 "hbase.client.instance.id"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_SIZE
 "hbase.client.ipc.pool.size"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_TYPE
 "hbase.client.ipc.pool.type"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERREGION_TASKS
 "hbase.client.max.perregion.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERSERVER_TASKS
 "hbase.client.max.perserver.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_TOTAL_TASKS
 "hbase.client.max.total.tasks"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_META_OPERATION_TIMEOUT
 "hbase.client.meta.operation.timeout"
 
-
+
 
 
 public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=tru

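The new row above adds HBASE_CANARY_READ_RAW_SCAN_KEY ("hbase.canary.read.raw.enabled") alongside the existing canary and client fast-fail keys. A hedged sketch of setting these keys on a Configuration before running the Canary tool; the values are examples only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class CanaryConfSketch {
  public static Configuration canaryConf() {
    Configuration conf = HBaseConfiguration.create();
    // New key from this diff: run the read canary with raw scans enabled.
    conf.setBoolean("hbase.canary.read.raw.enabled", true);
    // Write-sniffing keys listed in the same table; example values only.
    conf.setLong("hbase.canary.write.data.ttl", 24 * 60 * 60);
    conf.setInt("hbase.canary.write.value.size", 10);
    // Client fast-fail keys from the same table.
    conf.setBoolean("hbase.client.fast.fail.mode.enabled", true);
    conf.setLong("hbase.client.fastfail.threshold", 60000L);
    return conf;
  }
}
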
[37/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index c0563bb..dcf9f6b 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -737,122 +737,126 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+HBASE_CANARY_READ_RAW_SCAN_KEY 
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_DATA_TTL_KEY
 Canary config keys
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_LOWERLIMIT_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_PERSERVER_REGIONS_UPPERLIMIT_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_TABLE_CHECK_PERIOD_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CANARY_WRITE_VALUE_SIZE_KEY 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CHECKSUM_VERIFICATION
 If this parameter is set to true, then hbase will read
  data and then verify checksums.
 
 
-
+
 static boolean
 HBASE_CLIENT_ENABLE_FAST_FAIL_MODE_DEFAULT 
 
-
+
 static long
 HBASE_CLIENT_FAST_FAIL_CLEANUP_DURATION_MS_DEFAULT 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_CLEANUP_MS_DURATION_MS 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_INTERCEPTOR_IMPL 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_MODE_ENABLED
 Config for enabling/disabling the fast fail mode.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS 
 
-
+
 static long
 HBASE_CLIENT_FAST_FAIL_THREASHOLD_MS_DEFAULT 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_INSTANCE_ID
 Parameter name for unique identifier for this 
Configuration
  instance.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_SIZE
 Parameter name for HBase client IPC pool size
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_IPC_POOL_TYPE
 Parameter name for HBase client IPC pool type
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERREGION_TASKS
 The maximum number of concurrent connections the client 
will maintain to a single
  Region.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_PERSERVER_TASKS
 The maximum number of concurrent connections the client 
will maintain to a single
  RegionServer.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_MAX_TOTAL_TASKS
 The maximum number of concurrent connections the client 
will maintain.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_META_OPERATION_TIMEOUT
 Parameter name for HBase client operation timeout.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 HBASE_CLIENT_OPERATION_TIMEOUT
 Parameter name for HBase client operation timeout.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 t

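The field summary above gives the HConstants names behind the same configuration keys. Where code compiles against hbase-common, referencing the constants instead of the raw strings avoids typos; a minimal sketch, assuming the HConstants fields listed above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;

public class HConstantsUsageSketch {
  public static Configuration conf() {
    Configuration conf = HBaseConfiguration.create();
    // Same keys as the string literals in constant-values.html, but typo-safe.
    conf.setInt(HConstants.HBASE_CLIENT_MAX_TOTAL_TASKS, 200);
    conf.setInt(HConstants.HBASE_CLIENT_MAX_PERSERVER_TASKS, 10);
    conf.setLong(HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, 120000L);
    return conf;
  }
}
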
[29/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index a3915ba..9a59883 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -564,23 +564,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor
-HTable.getTableDescriptor()
+Table.getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
 
-protected HTableDescriptor
-HBaseAdmin.CreateTableFuture.getTableDescriptor() 
+HTableDescriptor
+HTable.getTableDescriptor()
+Gets the table descriptor for 
this table.
+
 
 
 protected HTableDescriptor
-HBaseAdmin.TableFuture.getTableDescriptor() 
+HBaseAdmin.CreateTableFuture.getTableDescriptor() 
 
 
-HTableDescriptor
-Table.getTableDescriptor()
-Gets the table descriptor for 
this table.
-
+protected HTableDescriptor
+HBaseAdmin.TableFuture.getTableDescriptor() 
 
 
 HTableDescriptor
@@ -981,182 +981,170 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContext ctx,
+MasterObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
-  HTableDescriptor hTableDescriptor) 
+  HTableDescriptor hTableDescriptor)
+Called after a snapshot clone operation has been 
requested.
+
 
 
 void
-BaseMasterObserver.postCloneSnapshot(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
   HTableDescriptor hTableDescriptor) 
 
 
 void
-MasterObserver.postCloneSnapshot(ObserverContext ctx,
+BaseMasterObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
-  HTableDescriptor hTableDescriptor)
-Called after a snapshot clone operation has been 
requested.
-
+  HTableDescriptor hTableDescriptor) 
 
 
 void
-BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
+MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions) 
+HRegionInfo[] regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
-BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions) 
 
 
 void
-MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions)
-Called after the createTable operation has been 
requested.
-
+HRegionInfo[] regions) 
 
 
 void
-BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContext ctx,
+MasterObserver.postCompletedModifyTableAction(ObserverContext ctx,
 TableName tableName,
-HTableDescriptor htd) 
+HTableDescriptor htd)
+Called after to modifying a table's properties.
+
 
 
 void
-BaseMasterObserver.postCompletedModifyTableAction(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContext ctx,
  

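The reshuffled rows above index MasterObserver hooks that receive an HTableDescriptor, such as postCompletedCreateTableAction ("Called after the createTable operation has been requested"). A minimal sketch of a coprocessor overriding that hook, assuming the 2.0.0-SNAPSHOT signatures shown in this diff; the class name and log message are hypothetical.

import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

/** Hypothetical observer; logs every table created through the master. */
public class TableCreationLogger extends BaseMasterObserver {
  private static final Log LOG = LogFactory.getLog(TableCreationLogger.class);

  @Override
  public void postCompletedCreateTableAction(
      ObserverContext<MasterCoprocessorEnvironment> ctx,
      HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
    LOG.info("Created table " + desc.getTableName()
        + " with " + regions.length + " regions");
  }
}
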
[34/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 5006ce4..e146e63 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -246,30 +246,30 @@
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
+DataBlockEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
 
 
 
@@ -299,30 +299,30 @@
 
 
 protected CellComparator
-HFile.WriterFactory.comparator 
+HFileWriterImpl.comparator
+Key comparator.
+
 
 
-private CellComparator
-HFileReaderImpl.comparator
-Key comparator
-
+protected CellComparator
+HFile.WriterFactory.comparator 
 
 
-protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
+private CellComparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
 protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
 
 
 
 private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+HFileReaderImpl.comparator
+Key comparator
 
 
 
@@ -500,44 +500,44 @@
 
 
 private CellComparator
-HStore.comparator 
+StoreFileWriter.Builder.comparator 
 
 
 private CellComparator
-Segment.comparator 
+AbstractMemStore.comparator 
 
 
-private CellComparator
-AbstractMemStore.comparator 
+protected CellComparator
+StripeMultiFileWriter.comparator 
 
 
 private CellComparator
-StoreFileWriter.Builder.comparator 
+Segment.comparator 
 
 
-protected CellComparator
-StripeMultiFileWriter.comparator 
+private CellComparator
+ScanInfo.comparator 
 
 
 private CellComparator
-ScanInfo.comparator 
+HStore.comparator 
 
 
 protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator 
+HRegion.RegionScannerImpl.comparator 
 
 
 protected CellComparator
-HRegion.RegionScannerImpl.comparator 
+StripeStoreFlusher.StripeFlushRequest.comparator 
 
 
-private CellComparator
-DefaultStoreFileManager.kvComparator 
-
-
 protected CellComparator
 KeyValueHeap.KVScannerComparator.kvComparator 
 
+
+private CellComparator
+DefaultStoreFileManager.kvComparator 
+
 
 private CellComparator
 ScanQueryMatcher.rowComparator
@@ -565,25 +565,17 @@
 
 
 CellComparator
-HStore.getComparator() 
+StoreFileReader.getComparator() 
 
 
 protected CellComparator
-Segment.getComparator()
-Returns the Cell comparator used by this segment
-
-
-
-CellComparator
-KeyValueHeap.KVScannerComparator.getComparator() 
-
-
-CellComparator
-StoreFileReader.getComparator() 
+AbstractMemStore.getComparator() 
 
 
 protected CellComparator
-AbstractMemStore.getComparator() 
+Segment.getComparator()
+Returns the Cell comparator used by this segment
+
 
 
 CellComparator
@@ -594,9 +586,17 @@
 Store.getComparator() 
 
 
+CellComparator
+KeyValueHeap.KVScannerComparator.getComparator() 
+
+
 (package private) CellComparator
 StoreFileScanner.getComparator() 
 
+
+CellComparator
+HStore.getComparator() 
+
 
 
 
@@ -629,6 +629,12 @@
 
 
 
+protected void
+DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
+Store store,
+CellComparator kvComparator) 
+
+
 protected abstract void
 StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
@@ -636,15 +

[31/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 03e37db..b4e49aa 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -867,7 +867,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-AbstractRegionServerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 HRegionInfo
@@ -875,7 +875,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+AbstractRegionServerCallable.getHRegionInfo() 
 
 
 private HRegionInfo
@@ -1113,16 +1113,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo) 
-
-
-void
-BaseMasterObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo) 
-
-
-void
 MasterObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo)
 Called after the region assignment has been requested.
@@ -1130,15 +1120,13 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
-HTableDescriptor desc,
-HRegionInfo[] regions) 
+BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo) 
 
 
 void
-BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
-HTableDescriptor desc,
-HRegionInfo[] regions) 
+BaseMasterObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo) 
 
 
 void
@@ -1150,15 +1138,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions) 
+BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
+HTableDescriptor desc,
+HRegionInfo[] regions) 
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions) 
+BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+HTableDescriptor desc,
+HRegionInfo[] regions) 
 
 
 void
@@ -1170,95 +1158,107 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContext ctx,
-HTableDescriptor desc,
-HRegionInfo[] regions)
-Deprecated. 
-
+BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions) 
 
 
 void
-BaseMasterObserver.postCreateTableHandler(ObserverContext ctx,
+BaseMasterObserver.postCreateTable(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions) 
+
+
+void
+MasterObserver.postCreateTableHandler(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
(https://issues.apache.org/jira/browse/HBASE-15575";>HBASE-15575).
-   Use BaseMasterObserver.postCompletedCreateTableAction(ObserverContext,
 HTableDescriptor, HRegionInfo[])
+   Use MasterObserver.postCompletedCreateTableAction(ObserverContext,
 HTableDescri

[24/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 65d17de..74fe4e6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -100,14 +100,14 @@
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 0698107..74ccea7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -163,14 +163,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
-
-
-void
 MasterServices.checkTableModifiable(TableName tableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableName tableName) 
+
 
 
 
@@ -186,13 +186,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-EnableTableHandler
-EnableTableHandler.prepare() 
-
-
 DisableTableHandler
 DisableTableHandler.prepare() 
 
+
+EnableTableHandler
+EnableTableHandler.prepare() 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index 17a77fc..c9d1384 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -767,18 +767,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
-  
boolean withSerializationFormat,
-  
boolean checkAuths) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
   
boolean withSerializationFormat,
   boolean checkAuths)
 Creates tags corresponding to given visibility 
expression.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
+  
boolean withSerializationFormat,
+  
boolean checkAuths) 
+
 
 static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 VisibilityUtils.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
@@ -823,11 +823,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 byte[]
-DefaultVisibilityLabelServiceImpl.encodeVisibilityForReplication(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List tags,
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in 
java.lang">Byte serializationFormat) 
-
-
-byte[]
 VisibilityLabelService.encodeVisibilityForReplication(ht

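The rows above index VisibilityLabelService.createVisibilityExpTags, which turns a visibility expression into Tags on the server side. Client code normally supplies the expression through CellVisibility on a mutation; a minimal sketch, assuming visibility labels are enabled and the labels in the expression already exist. Table, family, and label names are placeholders.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityPutSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("docs"))) {
      Put put = new Put(Bytes.toBytes("row-1"));
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
      // The expression below is parsed server side into visibility Tags.
      put.setCellVisibility(new CellVisibility("secret&!probationary"));
      table.put(put);
    }
  }
}
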
[22/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
index fae22cd..b1ce784 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Put.html
@@ -467,7 +467,7 @@ service.
 
 
 boolean
-HTable.checkAndPut(byte[] row,
+Table.checkAndPut(byte[] row,
   byte[] family,
   byte[] qualifier,
   byte[] value,
@@ -478,7 +478,7 @@ service.
 
 
 boolean
-Table.checkAndPut(byte[] row,
+HTable.checkAndPut(byte[] row,
   byte[] family,
   byte[] qualifier,
   byte[] value,
@@ -497,7 +497,7 @@ service.
 
 
 boolean
-HTable.checkAndPut(byte[] row,
+Table.checkAndPut(byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
@@ -509,7 +509,7 @@ service.
 
 
 boolean
-Table.checkAndPut(byte[] row,
+HTable.checkAndPut(byte[] row,
   byte[] family,
   byte[] qualifier,
   CompareFilter.CompareOp compareOp,
@@ -549,13 +549,13 @@ service.
 
 
 void
-HTable.put(Put put)
+Table.put(Put put)
 Puts some data in the table.
 
 
 
 void
-Table.put(Put put)
+HTable.put(Put put)
 Puts some data in the table.
 
 
@@ -580,11 +580,11 @@ service.
 
 
 void
-HTable.validatePut(Put put) 
+BufferedMutatorImpl.validatePut(Put put) 
 
 
 void
-BufferedMutatorImpl.validatePut(Put put) 
+HTable.validatePut(Put put) 
 
 
 static void
@@ -611,13 +611,13 @@ service.
 
 
 void
-HTable.put(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List puts)
+Table.put(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List puts)
 Puts some data in the table, in batch.
 
 
 
 void
-Table.put(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List puts)
+HTable.put(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List puts)
 Puts some data in the table, in batch.
 
 

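The Put class-use rows above cover Table.put(Put), put(List<Put>) and checkAndPut. A minimal client sketch exercising all three, assuming the Table API indexed here; table, family, and values are placeholders.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PutSketch {
  public static void main(String[] args) throws Exception {
    byte[] f = Bytes.toBytes("f");
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("demo"))) {
      // Single put.
      Put p = new Put(Bytes.toBytes("row-1"));
      p.addColumn(f, Bytes.toBytes("q"), Bytes.toBytes("v1"));
      table.put(p);

      // Batch put(List<Put>).
      List<Put> batch = new ArrayList<>();
      for (int i = 0; i < 10; i++) {
        Put b = new Put(Bytes.toBytes("row-" + i));
        b.addColumn(f, Bytes.toBytes("q"), Bytes.toBytes("v" + i));
        batch.add(b);
      }
      table.put(batch);

      // checkAndPut: only applies newPut if f:q currently holds "v1".
      Put newPut = new Put(Bytes.toBytes("row-1"));
      newPut.addColumn(f, Bytes.toBytes("q"), Bytes.toBytes("v2"));
      boolean applied = table.checkAndPut(
          Bytes.toBytes("row-1"), f, Bytes.toBytes("q"), Bytes.toBytes("v1"), newPut);
      System.out.println("checkAndPut applied: " + applied);
    }
  }
}
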
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
index c2612d3..f31d2ff 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RegionLocator.html
@@ -146,13 +146,13 @@ service.
 
 
 RegionLocator
-Connection.getRegionLocator(TableName tableName)
-Retrieve a RegionLocator implementation to inspect region 
information on a table.
-
+ConnectionImplementation.getRegionLocator(TableName tableName) 
 
 
 RegionLocator
-ConnectionImplementation.getRegionLocator(TableName tableName) 
+Connection.getRegionLocator(TableName tableName)
+Retrieve a RegionLocator implementation to inspect region 
information on a table.
+
 
 
 

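The rows above describe Connection.getRegionLocator(TableName) as the way to "inspect region information on a table". A minimal sketch that lists a table's region locations; the table name is a placeholder.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;

public class RegionLocatorSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         RegionLocator locator = conn.getRegionLocator(TableName.valueOf("demo"))) {
      // Each HRegionLocation names a region and the server currently hosting it.
      for (HRegionLocation loc : locator.getAllRegionLocations()) {
        System.out.println(loc.getRegionInfo().getRegionNameAsString()
            + " on " + loc.getServerName());
      }
    }
  }
}
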
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 79477f4..fced4aa 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -381,13 +381,13 @@ service.
 
 
 Result
-HTable.append(Append append)
+Table.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
 
 Result
-Table.append(Append append)
+HTable.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
@@ -404,17 +404,17 @@ service.
 
 
 
-Result[]
-ScannerCallable.call(int callTimeout) 
-
-
 Result
 RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
 
-
+
 Result[]
 ClientSmallScanner.SmallScannerCallable.call(int timeout) 
 
+
+Result[]
+ScannerCallable.call(int callTimeout) 
+
 
 Result[]
 ScannerCallableWithReplicas.call(int timeout) 
@@ -480,13 +480,13 @@ service.
 
 
 Result
-HTable.get(Get get)
+Table.get(Get get)
 Extracts certain cells from a given row.
 
 
 
 Result
-Table.get(Get get)
+HTable.get(Get get)
 Extracts certain cells from a 

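The Result class-use rows above cover Table.get(Get) ("Extracts certain cells from a given row") and Table.append(Append). A minimal sketch of both, assuming the client API indexed here; table, family, and values are placeholders.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetAppendSketch {
  public static void main(String[] args) throws Exception {
    byte[] f = Bytes.toBytes("f");
    byte[] q = Bytes.toBytes("q");
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("demo"))) {
      // Table.get(Get): extract certain cells from a given row.
      Get get = new Get(Bytes.toBytes("row-1"));
      get.addColumn(f, q);
      Result r = table.get(get);
      System.out.println("current: " + Bytes.toString(r.getValue(f, q)));

      // Table.append(Append): atomically append to the same cell and
      // return the new value as a Result.
      Append append = new Append(Bytes.toBytes("row-1"));
      append.add(f, q, Bytes.toBytes("-suffix"));
      Result appended = table.append(append);
      System.out.println("after append: " + Bytes.toString(appended.getValue(f, q)));
    }
  }
}
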
[36/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
index e386993..97174a2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public static HealthChecker.HealthCheckerExitStatus[] values()
+public static HealthChecker.HealthCheckerExitStatus[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -275,7 +275,7 @@ for (HealthChecker.HealthCheckerExitStatus c : 
HealthChecker.HealthCheckerExitSt
 
 
 valueOf
-public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index a0339eb..41fb732 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html 
b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
index a20872e..bc7e4ff 100644
--- a/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/ScheduledChore.html
@@ -99,7 +99,7 @@
 
 
 Direct Known Subclasses:
-BalancerChore, CatalogJanitor, CleanerChore, ClusterStatusChore, ClusterStatusPublisher, CompactedHFilesDischarger, ExpiredMobFileCleanerChore, HealthCheckChore, HeapMemoryManager.HeapMemoryTunerChore,
 HMaster.PeriodicDoMetrics, 
HRegionServer.CompactionChecker, HRegionServer.MovedRegionsCleaner, HRegionServer.PeriodicMemstoreFlusher,
 MobCompactionChore, QuotaCache.QuotaRefresherChore, RegionNormalizerChore, ReplicationZKLockCleanerChore, SplitLogManager.TimeoutMonitor, StorefileRefresherChore
+BalancerChore, CatalogJanitor, CleanerChore, ClusterStatusChore, ClusterStatusPublisher, CompactedHFilesDischarger, ExpiredMobFileCleanerChore, HealthCheckChore, HeapMemoryManager.HeapMemoryTunerChore,
 HMaster.PeriodicDoMetrics, 
HRegionServer.CompactionChecker, HRegionServer.MovedRegionsCleaner, HRegionServer.PeriodicMemstoreFlusher,
 MobCompactionChore, QuotaCache.QuotaRefresherChore, RegionNormalizerChore, SplitLogManager.TimeoutMonitor, StorefileRefresherChore
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index 520fcf3..7a0cb69 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+

[25/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index c3adb68..cca6a9a 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -754,52 +754,52 @@ service.
 
 
 
-protected TableName
-AbstractRegionServerCallable.tableName 
-
-
 private TableName
-BufferedMutatorParams.tableName 
+HRegionLocator.tableName 
 
-
+
 protected TableName
 RpcRetryingCallerWithReadReplicas.tableName 
 
+
+private TableName
+BufferedMutatorImpl.tableName 
+
 
 private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName 
+TableState.tableName 
 
 
-protected TableName
-RegionAdminServiceCallable.tableName 
+private TableName
+BufferedMutatorParams.tableName 
 
 
-private TableName
-HRegionLocator.tableName 
+protected TableName
+RegionAdminServiceCallable.tableName 
 
 
 private TableName
-HTable.tableName 
+AsyncProcess.AsyncRequestFutureImpl.tableName 
 
 
 private TableName
-ClientScanner.tableName 
+HTable.tableName 
 
 
 private TableName
-TableState.tableName 
+ClientScanner.tableName 
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName 
+ScannerCallableWithReplicas.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+HBaseAdmin.TableFuture.tableName 
 
 
-private TableName
-BufferedMutatorImpl.tableName 
+protected TableName
+AbstractRegionServerCallable.tableName 
 
 
 
@@ -833,31 +833,31 @@ service.
 
 
 TableName
-RegionLocator.getName()
-Gets the fully qualified table name instance of this 
table.
-
+HRegionLocator.getName() 
 
 
 TableName
-HRegionLocator.getName() 
+BufferedMutatorImpl.getName() 
 
 
 TableName
-HTable.getName() 
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+Table.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-BufferedMutatorImpl.getName() 
+HTable.getName() 
 
 
 TableName
-Table.getName()
+RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
@@ -871,22 +871,22 @@ service.
 
 
 TableName
-AbstractRegionServerCallable.getTableName() 
+TableState.getTableName()
+Table name for state
+
 
 
 TableName
 BufferedMutatorParams.getTableName() 
 
 
-TableName
-TableState.getTableName()
-Table name for state
-
-
-
 protected TableName
 HBaseAdmin.TableFuture.getTableName() 
 
+
+TableName
+AbstractRegionServerCallable.getTableName() 
+
 
 private TableName
 HBaseAdmin.getTableNameBeforeRestoreSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String snapshotName) 
@@ -1007,16 +1007,16 @@ service.
 
 
 void
+ClusterConnection.cacheLocation(TableName tableName,
+  RegionLocations location) 
+
+
+void
 MetaCache.cacheLocation(TableName tableName,
   RegionLocations locations)
 Put a newly discovered HRegionLocation into the cache.
 
 
-
-void
-ClusterConnection.cacheLocation(TableName tableName,
-  RegionLocations location) 
-
 
 private void
 ConnectionImplementation.cacheLocation(TableName tableName,
@@ -1348,15 +1348,15 @@ service.
 
 
 BufferedMutator
+ConnectionImplementation.getBufferedMutator(TableName tableName) 
+
+
+BufferedMutator
 Connection.getBufferedMutator(TableName tableName)
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes.
 
 
-
-BufferedMutator
-ConnectionImplementation.getBufferedMutator(TableName tableName) 
-
 
 (package private) RegionLocations
 ConnectionImplementation.getCachedLocation(TableName tableName,
@@ -1470,36 +1470,36 @@ service.
 
 
 RegionLocator
-Connection.getRegionLocator(TableName tableName)
-Retrieve a RegionLocator implementation to inspect region 
information on a table.
-
+ConnectionImplementation.getRegionLocator(TableName tableName) 
 
 
 RegionLocator
-ConnectionImplementation.getRegionLocator(TableName tableName) 
+Connection.getRegionLocator(TableName tableName)
+Retrieve a RegionLocator implementation to inspect region 
information on a table.
+
 
 
 Table
+ConnectionImplementation.getTable(TableName tableName) 
+
+
+Table
 Connection.getTable(TableName tableName)
 Retrieve a Table implementation for accessing a table.
 
 
-
+
 Table
-ConnectionImplementation.getTable(TableName tableName) 
+ConnectionImplementation.getTable(TableName tableName,
+http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java

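The rows above describe Connection.getBufferedMutator(TableName) as the entry point for "client-side buffering of writes". A minimal sketch, assuming the default mutator parameters; the table and row keys are placeholders.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BufferedMutatorSketch {
  public static void main(String[] args) throws Exception {
    TableName name = TableName.valueOf("demo");
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         BufferedMutator mutator = conn.getBufferedMutator(name)) {
      for (int i = 0; i < 1000; i++) {
        Put put = new Put(Bytes.toBytes("row-" + i));
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes(i));
        // mutate() buffers locally; the client flushes when the write buffer fills.
        mutator.mutate(put);
      }
      // Push any remaining buffered mutations out before close().
      mutator.flush();
    }
  }
}
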
[28/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 38c22d9..1689c47 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
@@ -166,13 +166,13 @@
 
 
 
-private InterProcessLock.MetadataHandler
-ZKInterProcessReadWriteLock.handler 
-
-
 protected InterProcessLock.MetadataHandler
 ZKInterProcessLockBase.handler 
 
+
+private InterProcessLock.MetadataHandler
+ZKInterProcessReadWriteLock.handler 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 4cec977..344c681 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -785,28 +785,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 Gets initial, full list of candidate store files to check 
for row-key-before.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
  for details on this methods.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
-  KeyValue targetKey,
-  Cell candidate) 
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
@@ -814,7 +808,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Updates the candidate list for finding row key before.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
@@ -824,6 +818,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  for details on this methods.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
+  KeyValue targetKey,
+  Cell candidate) 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/N

[11/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
index 1c082f3..fd6bd7f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PayloadCarryingRpcController.html
@@ -111,13 +111,13 @@
 
 
 
-protected PayloadCarryingRpcController
-ScannerCallable.controller 
-
-
 private PayloadCarryingRpcController
 RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.controller 
 
+
+protected PayloadCarryingRpcController
+ScannerCallable.controller 
+
 
 protected PayloadCarryingRpcController
 PayloadCarryingServerCallable.controller 
@@ -221,13 +221,13 @@
 
 
 
-protected abstract Pair
-AbstractRpcClient.call(PayloadCarryingRpcController pcrc,
+protected Pair
+AsyncRpcClient.call(PayloadCarryingRpcController pcrc,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 com.google.protobuf.Message returnType,
 User ticket,
-http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress isa,
+http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress addr,
 MetricsConnection.CallStats callStats)
 Make a call, passing param, to the IPC server 
running at
  address which is servicing the protocol protocol,
@@ -235,13 +235,13 @@
 
 
 
-protected Pair
-AsyncRpcClient.call(PayloadCarryingRpcController pcrc,
+protected abstract Pair
+AbstractRpcClient.call(PayloadCarryingRpcController pcrc,
 com.google.protobuf.Descriptors.MethodDescriptor md,
 com.google.protobuf.Message param,
 com.google.protobuf.Message returnType,
 User ticket,
-http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress addr,
+http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress isa,
 MetricsConnection.CallStats callStats)
 Make a call, passing param, to the IPC server 
running at
  address which is servicing the protocol protocol,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
index f902fe3..44471ab 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/PriorityFunction.html
@@ -248,42 +248,42 @@
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority)
 Deprecated. 
 
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority)
 Deprecated. 
 
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority)
 Deprecated. 
 
 
 
 RpcScheduler
-RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server)
-Constructs a RpcScheduler.
-
+Abortable server) 
 
 
 RpcScheduler
-SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+RpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
-Abortable server) 
+Abortable server)
+Constructs a RpcScheduler.
+
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
+SimpleRpcSchedulerFactory.create(org.apache.hadoop.conf.Configuration conf,
 PriorityFunction priority,
 Abortable server) 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapi

[19/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index b8db80e..98007c6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -114,114 +114,104 @@
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
+ MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDesc

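The deprecation notes above point from postAddColumn/postAddColumnHandler to postAddColumnFamily and postCompletedAddColumnFamilyAction. A minimal sketch of a BaseMasterObserver overriding the replacement hook, assuming the 2.0.0-SNAPSHOT signature shown in this diff; the class name is hypothetical.

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

/** Hypothetical observer using the non-deprecated column-family hook. */
public class ColumnFamilyAuditor extends BaseMasterObserver {
  @Override
  public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
      TableName tableName, HColumnDescriptor columnFamily) throws IOException {
    // Called after the new column family has been created.
    System.out.println("Added family " + columnFamily.getNameAsString()
        + " to table " + tableName.getNameAsString());
  }
}
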
[07/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
index 74cd6ef..2cafb00 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
@@ -141,19 +141,19 @@
 
 
 private TableLockManager
-AssignmentManager.tableLockManager 
+ExpiredMobFileCleanerChore.tableLockManager 
 
 
 private TableLockManager
-ExpiredMobFileCleanerChore.tableLockManager 
+MasterMobCompactionThread.CompactionRunner.tableLockManager 
 
 
 private TableLockManager
-MasterMobCompactionThread.CompactionRunner.tableLockManager 
+MobCompactionChore.tableLockManager 
 
 
 private TableLockManager
-MobCompactionChore.tableLockManager 
+AssignmentManager.tableLockManager 
 
 
 
@@ -239,11 +239,11 @@
 
 
 private TableLockManager
-EnableTableHandler.tableLockManager 
+DisableTableHandler.tableLockManager 
 
 
 private TableLockManager
-DisableTableHandler.tableLockManager 
+EnableTableHandler.tableLockManager 
 
 
 
@@ -413,13 +413,13 @@
 
 
 
-protected TableLockManager
-HRegionServer.tableLockManager 
-
-
 private TableLockManager
 HMobStore.tableLockManager 
 
+
+protected TableLockManager
+HRegionServer.tableLockManager 
+
 
 
 
@@ -431,11 +431,11 @@
 
 
 TableLockManager
-HRegionServer.getTableLockManager() 
+RegionServerServices.getTableLockManager() 
 
 
 TableLockManager
-RegionServerServices.getTableLockManager() 
+HRegionServer.getTableLockManager() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
index 0819182..c7a5273 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
@@ -96,7 +96,7 @@
 
 
 private TableStateManager
-AssignmentManager.tableStateManager 
+RegionStates.tableStateManager 
 
 
 private TableStateManager
@@ -104,7 +104,7 @@
 
 
 private TableStateManager
-RegionStates.tableStateManager 
+AssignmentManager.tableStateManager 
 
 
 
@@ -117,7 +117,7 @@
 
 
 TableStateManager
-AssignmentManager.getTableStateManager() 
+MasterServices.getTableStateManager() 
 
 
 TableStateManager
@@ -125,7 +125,7 @@
 
 
 TableStateManager
-MasterServices.getTableStateManager() 
+AssignmentManager.getTableStateManager() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/cleaner/LogCleaner.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/cleaner/LogCleaner.html 
b/devapidocs/org/apache/hadoop/hbase/master/cleaner/LogCleaner.html
index ef6fdb1..9bb2e46 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/cleaner/LogCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/cleaner/LogCleaner.html
@@ -36,7 +36,7 @@
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -305,7 +305,7 @@ extends 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/cleaner/ReplicationZKLockCleanerChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/cleaner/ReplicationZKLockCleanerChore.html
 
b/devapidocs/org/apache/hadoop/hbase/master/cleaner/ReplicationZKLockCleanerChore.html
deleted file mode 100644
index 9931c31..000
--- 
a/devapidocs/org/apache/hadoop/hbase/master/cleaner/ReplicationZKLockCleanerChore.html
+++ /dev/null
@@ -1,409 +0,0 @@
-ReplicationZKLockCleanerChore (Apache HBase 2.0.0-SNAPSHOT API)

[17/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index 43287e3..d3973b4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,11 +152,11 @@
 
 
 private RegionCoprocessorEnvironment
-AggregateImplementation.env 
+MultiRowMutationEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
-MultiRowMutationEndpoint.env 
+AggregateImplementation.env 
 
 
 private RegionCoprocessorEnvironment
@@ -1948,14 +1948,14 @@
 
 
 void
-DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
-
-
-void
 VisibilityLabelService.init(RegionCoprocessorEnvironment e)
 System calls this after opening of regions.
 
 
+
+void
+DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
+
 
 private void
 VisibilityController.initVisibilityLabelService(RegionCoprocessorEnvironment env) 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
index a6609dd..f76650a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
@@ -110,63 +110,69 @@
 
 
 ReplicationEndpoint
+BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
+  ReplicationEndpoint endpoint) 
+
+
+ReplicationEndpoint
 RegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
   ReplicationEndpoint endpoint)
 This will be called after the replication endpoint is 
instantiated.
 
 
-
-ReplicationEndpoint
-BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
-  ReplicationEndpoint endpoint) 
-
 
 void
-RegionServerObserver.postMerge(ObserverContext c,
+BaseRegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
   Region regionB,
-  Region mergedRegion)
-called after the regions merge.
-
+  Region mergedRegion) 
 
 
 void
-BaseRegionServerObserver.postMerge(ObserverContext c,
+RegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
   Region regionB,
-  Region mergedRegion) 
+  Region mergedRegion)
+called after the regions merge.
+
 
 
 void
-RegionServerObserver.postMergeCommit(ObserverContext ctx,
+BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
   Region regionB,
-  Region mergedRegion)
-This will be called after PONR step as part of regions 
merge transaction.
-
+  Region mergedRegion) 
 
 
 void
-BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
+RegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
   Region regionB,
-  Region mergedRegion) 
+  Region mergedRegion)
+This will be called after PONR step as part of regions 
merge transaction.
+
 
 
 void
+BaseRegionServerObserver.postReplicateLogEntries(ObserverContext ctx,
+  http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List entries,
+  CellScanner cells) 
+
+
+void
 RegionServerObserver.postReplicateLogEntries(ObserverConte

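The use page above lists the region-merge hooks on RegionServerObserver ("called after the regions merge"). A minimal sketch of overriding postMerge via BaseRegionServerObserver follows, assuming the ObserverContext is parameterized with RegionServerCoprocessorEnvironment as on this page; MergeLoggingObserver is a made-up name.

    import java.io.IOException;

    import org.apache.hadoop.hbase.coprocessor.BaseRegionServerObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
    import org.apache.hadoop.hbase.regionserver.Region;

    // Illustrative region server observer: reacts after two regions have merged.
    public class MergeLoggingObserver extends BaseRegionServerObserver {
      @Override
      public void postMerge(ObserverContext<RegionServerCoprocessorEnvironment> c,
          Region regionA, Region regionB, Region mergedRegion) throws IOException {
        System.out.println("Merged " + regionA.getRegionInfo().getRegionNameAsString()
            + " and " + regionB.getRegionInfo().getRegionNameAsString()
            + " into " + mergedRegion.getRegionInfo().getRegionNameAsString());
      }
    }
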
[06/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index e8782e2..571e6aa 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -114,23 +114,23 @@
 
 
 void
-BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
+MasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId) 
+  long procId)
+Called before a abortProcedure request has been 
processed.
+
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
   long procId) 
 
 
 void
-MasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId)
-Called before a abortProcedure request has been 
processed.
-
+  long procId) 
 
 
 
@@ -161,11 +161,11 @@
 
 
 ProcedureExecutor
-HMaster.getMasterProcedureExecutor() 
+MasterServices.getMasterProcedureExecutor() 
 
 
 ProcedureExecutor
-MasterServices.getMasterProcedureExecutor() 
+HMaster.getMasterProcedureExecutor() 
 
 
 
@@ -197,11 +197,11 @@
 
 
 boolean
-CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-EnableTableProcedure.abort(MasterProcedureEnv env) 
+CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
@@ -209,59 +209,59 @@
 
 
 boolean
-DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateTableProcedure.abort(MasterProcedureEnv env) 
+DeleteTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyTableProcedure.abort(MasterProcedureEnv env) 
+CreateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DisableTableProcedure.abort(MasterProcedureEnv env) 
+EnableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+TruncateTableProcedure.abort(MasterProcedureEnv env) 
 
 
-protected boolean
-ServerCrashProcedure.abort(MasterProcedureEnv env) 
+boolean
+ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteTableProcedure.abort(MasterProcedureEnv env) 
+ModifyTableProcedure.abort(MasterProcedureEnv env) 
 
 
-boolean
-ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
+protected boolean
+ServerCrashProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-TruncateTableProcedure.abort(MasterProcedureEnv env) 
+DisableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
+CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
@@ -269,51 +269,51 @@
 
 
 protected boolean
-DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+CloneSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
+CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DisableTableProcedure.acquireLock(MasterProcedureEnv env) 
+EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-AddColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+TruncateTableProcedure.acquireLock(Ma

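The hunk above shows preAbortProcedure ("Called before a abortProcedure request has been processed") alongside the per-procedure abort() and acquireLock() methods. A hedged sketch of one way an observer could use the pre-hook, rejecting aborts by throwing, is below; NoAbortObserver is hypothetical and the generic parameters are assumed from this use page.

    import java.io.IOException;

    import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
    import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
    import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;

    // Illustrative observer: fails the abortProcedure request before it is processed.
    public class NoAbortObserver extends BaseMasterObserver {
      @Override
      public void preAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx,
          ProcedureExecutor<MasterProcedureEnv> procExec, long procId) throws IOException {
        // Throwing from a pre-hook rejects the operation for this deployment.
        throw new IOException("Aborting procedure " + procId + " is not allowed here");
      }
    }
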
[01/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site b6dc57358 -> e29c39f24


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html
index 1bb6074..52a7d54 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.html
@@ -388,7 +388,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 createScanner
-private StoreScanner createScanner(Store store)
+private StoreScanner createScanner(Store store)
 throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Creates the scanner for compacting the pipeline.
 Returns:the scanner
@@ -402,7 +402,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 compactSegments
-private void compactSegments(Segment result)
+private void compactSegments(Segment result)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Updates the given single Segment using the internal store 
scanner,
  who in turn uses ScanQueryMatcher


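The createScanner/compactSegments summaries above describe the in-memory compaction step only briefly: scan the segments in the pipeline and rewrite the surviving cells into a single segment. The following self-contained sketch uses stand-in types (its Segment is not the regionserver class) purely to illustrate that shape.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.TreeMap;

    // Stand-in illustration of "compact the pipeline into one segment".
    public class PipelineCompactionSketch {
      static class Segment {
        final TreeMap<String, String> cells = new TreeMap<>();
      }

      // Analogue of createScanner() + compactSegments(): walk every segment in order
      // and fold its cells into a single result segment.
      static Segment compact(List<Segment> pipeline) {
        Segment result = new Segment();
        for (Segment s : pipeline) {
          result.cells.putAll(s.cells);   // later entries overwrite earlier ones in this toy model
        }
        return result;
      }

      public static void main(String[] args) {
        Segment older = new Segment();
        older.cells.put("row1", "v1");
        Segment newer = new Segment();
        newer.cells.put("row1", "v2");
        List<Segment> pipeline = new ArrayList<>();
        pipeline.add(older);
        pipeline.add(newer);
        System.out.println(compact(pipeline).cells);  // prints {row1=v2}
      }
    }
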

[02/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 7a4978f..9e8e91b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -109,7 +109,7 @@
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HRegionServer
+public class HRegionServer
 extends HasThread
 implements RegionServerServices, LastSequenceId, 
ConfigurationObserver
 HRegionServer makes a set of HRegions available to clients. 
It checks in with
@@ -1393,7 +1393,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1402,7 +1402,7 @@ implements 
 
 TEST_SKIP_REPORTING_TRANSITION
-public static boolean TEST_SKIP_REPORTING_TRANSITION
+public static boolean TEST_SKIP_REPORTING_TRANSITION
 For testing only!  Set to true to skip notifying region 
assignment to master .
 
 
@@ -1412,7 +1412,7 @@ implements 
 
 OPEN
-protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OPEN
+protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String OPEN
 See Also:Constant
 Field Values
 
 
@@ -1422,7 +1422,7 @@ implements 
 
 CLOSE
-protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLOSE
+protected static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLOSE
 See Also:Constant
 Field Values
 
 
@@ -1432,7 +1432,7 @@ implements 
 
 regionsInTransitionInRS
-protected final http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMapBoolean> regionsInTransitionInRS
+protected final http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ConcurrentMapBoolean> regionsInTransitionInRS
 
 
 
@@ -1441,7 +1441,7 @@ implements 
 
 cacheFlusher
-protected MemStoreFlusher cacheFlusher
+protected MemStoreFlusher cacheFlusher
 
 
 
@@ -1450,7 +1450,7 @@ implements 
 
 hMemManager
-protected HeapMemoryManager hMemManager
+protected HeapMemoryManager hMemManager
 
 
 
@@ -1459,7 +1459,7 @@ implements 
 
 clusterConnection
-protected ClusterConnection clusterConnection
+protected ClusterConnection clusterConnection
 Cluster connection to be shared by services.
  Initialized at server startup and closed when server shuts down.
  Clients must never close it explicitly.
@@ -1471,7 +1471,7 @@ implements 
 
 metaTableLocator
-protected MetaTableLocator metaTableLocator
+protected MetaTableLocator metaTableLocator
 
 
 
@@ -1480,7 +1480,7 @@ implements 
 
 recoveringRegionWatcher
-private RecoveringRegionWatcher 
recoveringRegionWatcher
+private RecoveringRegionWatcher 
recoveringRegionWatcher
 
 
 
@@ -1489,7 +1489,7 @@ implements 
 
 tableDescriptors
-protected TableDescriptors tableDescriptors
+protected TableDescriptors tableDescriptors
 Go here to get table descriptors.
 
 
@@ -1499,7 +1499,7 @@ implements 
 
 replicationSourceHandler
-protected ReplicationSourceService replicationSourceHandler
+protected ReplicationSourceService replicationSourceHandler
 
 
 
@@ -1508,7 +1508,7 @@ implements 
 
 replicationSinkHandler
-protected ReplicationSinkService replicationSinkHandler
+protected ReplicationSinkService replicationSinkHandler
 
 
 
@@ -1517,7 +1517,7 @@ implements 
 
 compactSplitThread
-public CompactSplitThread 
compactSplitThread
+public CompactSplitThread 
compactSplitThread
 
 
 
@@ -1526,7 +1526,7 @@ implements 
 
 onlineRegions
-protected final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,Region> onlineRegions
+protected final http://docs.oracle.com/javase/7/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">Map

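The clusterConnection field note above distinguishes the server's shared connection (initialized and closed by HRegionServer itself) from connections that client code creates for its own use. A short client-side sketch of the latter, where closing is the application's job; the table name my_table is an assumption.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    public class ClientConnectionSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // A connection the application creates is the application's to close;
        // the server-internal clusterConnection above is managed by the region server.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) {
          System.out.println("Connected, table: " + table.getName());
        }
      }
    }
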
[20/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
index f681782..6a31414 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -1195,47 +1195,15 @@ service.
 
 
 
-BufferedMutator
-Used to communicate with a single HBase table similar to Table but 
meant for
- batched, asynchronous puts.
-
-
-
-Connection
-A cluster connection encapsulating lower level individual 
connections to actual servers and
- a connection to zookeeper.
-
-
-
 Put
 Used to perform Put operations for a single row.
 
 
 
-RegionLocator
-Used to view region location information for a single HBase 
table.
-
-
-
 Result
 Single row result of a Get or Scan query.
 
 
-
-ResultScanner
-Interface for client-side scanning.
-
-
-
-Scan
-Used to perform Scan operations.
-
-
-
-Table
-Used to communicate with a single HBase table.
-
-
 
 
 
@@ -1300,11 +1268,16 @@ service.
 
 
 
+ResultScanner
+Interface for client-side scanning.
+
+
+
 Scan
 Used to perform Scan operations.
 
 
-
+
 Table
 Used to communicate with a single HBase table.
 
@@ -2189,6 +2162,16 @@ service.
 
 
 
+Append
+Performs Append operations on a single row.
+
+
+
+Increment
+Used to perform Increment operations on a single row.
+
+
+
 OperationWithAttributes 
 
 
@@ -2219,31 +2202,21 @@ service.
 
 
 
-Append
-Performs Append operations on a single row.
-
-
-
 Delete
 Used to perform Delete operations on a single row.
 
 
-
+
 Durability
 Enum describing the durability guarantees for tables and Mutations
  Note that the items must be sorted in order of increasing durability
 
 
-
+
 Get
 Used to perform Get operations on a single row.
 
 
-
-Increment
-Used to perform Increment operations on a single row.
-
-
 
 OperationWithAttributes 
 
@@ -2258,26 +2231,21 @@ service.
 
 
 
-Result
-Single row result of a Get or Scan query.
-
-
-
 ResultScanner
 Interface for client-side scanning.
 
 
-
+
 RowMutations
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 Scan
 Used to perform Scan operations.
 
 
-
+
 Table
 Used to communicate with a single HBase table.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html 
b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
index 2d9959a..3ddf9ee 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
@@ -147,15 +147,15 @@
 
 
 Codec.Decoder
-CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
-CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
-Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
@@ -173,15 +173,15 @@
 
 
 Codec.Decoder
-CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
-CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
-Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+CellCodecWithTags.getDecoder(http://docs.oracle.com/ja

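The package-use hunks above enumerate the core client operation classes (Put, Get, Scan, ResultScanner, Table and friends). A compact sketch exercising them end to end; the table name "demo" and the family/qualifier names are assumptions.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ClientBasicsSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Table table = conn.getTable(TableName.valueOf("demo"))) {
          // Put: single-row write
          Put put = new Put(Bytes.toBytes("row1"));
          put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
          table.put(put);

          // Get: single-row read
          Result one = table.get(new Get(Bytes.toBytes("row1")));
          System.out.println(Bytes.toString(one.getValue(Bytes.toBytes("f"), Bytes.toBytes("q"))));

          // Scan + ResultScanner: iterate over many rows
          try (ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result r : scanner) {
              System.out.println(Bytes.toString(r.getRow()));
            }
          }
        }
      }
    }
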
[09/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
index 1fee27e..8450d48 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html
@@ -222,7 +222,7 @@ the order they are declared.
 
 
 values
-public static MasterRpcServices.BalanceSwitchMode[] values()
+public static MasterRpcServices.BalanceSwitchMode[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -239,7 +239,7 @@ for (MasterRpcServices.BalanceSwitchMode c : 
MasterRpcServices.BalanceSwitchMode
 
 
 valueOf
-public static MasterRpcServices.BalanceSwitchMode valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static MasterRpcServices.BalanceSwitchMode valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/RegionState.State.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/RegionState.State.html 
b/devapidocs/org/apache/hadoop/hbase/master/RegionState.State.html
index b977205..39dd081 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/RegionState.State.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/RegionState.State.html
@@ -392,7 +392,7 @@ the order they are declared.
 
 
 values
-public static RegionState.State[] values()
+public static RegionState.State[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -409,7 +409,7 @@ for (RegionState.State c : RegionState.State.values())
 
 
 valueOf
-public static RegionState.State valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static RegionState.State valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index b9a4717..84b010b 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -427,6 +427,10 @@ implements Check if configured to put any tables on the active 
master
 
 
+
+static boolean
+userTablesOnMaster(org.apache.hadoop.conf.Configuration conf) 
+
 
 
 
@@ -662,13 +666,22 @@ implements Check if configured to put any tables on the active 
master
 
 
+
+
+
+
+
+userTablesOnMaster
+public static boolean userTablesOnMaster(org.apache.hadoop.conf.Configuration conf)
+
+
 
 
 
 
 
 setConf
-public void setConf(org.apache.hadoop.conf.Configuration conf)
+public void setConf(org.apache.hadoop.conf.Configuration conf)
 
 Specified by:
 setConf in 
interface org.apache.hadoop.conf.Configurable
@@ -681,7 +694,7 @@ implements 
 
 setSlop
-protected void setSlop(org.apache.hadoop.conf.Configuration conf)
+protected void setSlop(org.apache.hadoop.conf.Configuration conf)
 
 
 
@@ -690,7 +703,7 @@ implements 
 
 shouldBeOnMaster
-public boolean shouldBeOnMaster(HRegionInfo region)
+public boolean shouldBeOnMaster(HRegionInfo region)
 Check if a region belongs to some small system table.
  If so, the primary replica may be expected to be put on the master 
regionserver.
 
@@ -701,7 +714,7 @@ implements 
 
 balanceMasterRegions
-protected http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List

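The values()/valueOf() Javadoc above is the standard enum contract, with its iteration example truncated in this digest. A tiny sketch spelling the pattern out against the public RegionState.State enum (MasterRpcServices.BalanceSwitchMode is internal to the master); note that valueOf() throws IllegalArgumentException for an unknown name.

    import org.apache.hadoop.hbase.master.RegionState;

    public class EnumPatternSketch {
      public static void main(String[] args) {
        // values(): iterate over the constants in declaration order, as the Javadoc above suggests.
        for (RegionState.State c : RegionState.State.values()) {
          System.out.println(c);
        }
        // valueOf(): the name must match a declared constant exactly,
        // otherwise IllegalArgumentException is thrown.
        RegionState.State open = RegionState.State.valueOf("OPEN");
        System.out.println(open);
      }
    }
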
[04/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
index 4d79609..ac1bfe6 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
@@ -147,25 +147,17 @@
 
 
 void
-ProcedureCoordinatorRpcs.resetMembers(Procedure procName)
-Notify Members to reset the distributed state for 
procedure
-
-
-
-void
 ZKProcedureCoordinatorRpcs.resetMembers(Procedure proc)
 Delete znodes that are no longer in use.
 
 
-
+
 void
-ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
-ForeignException cause)
-Notify the members that the coordinator has aborted the 
procedure and that it should release
- barrier resources.
+ProcedureCoordinatorRpcs.resetMembers(Procedure procName)
+Notify Members to reset the distributed state for 
procedure
 
 
-
+
 void
 ZKProcedureCoordinatorRpcs.sendAbortToMembers(Procedure proc,
 ForeignException ee)
@@ -175,15 +167,15 @@
  coordinator.
 
 
-
+
 void
-ProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure procName,
-byte[] info,
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
-Notify the members to acquire barrier for the 
procedure
+ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
+ForeignException cause)
+Notify the members that the coordinator has aborted the 
procedure and that it should release
+ barrier resources.
 
 
-
+
 void
 ZKProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure proc,
 byte[] info,
@@ -191,19 +183,27 @@
 The "acquire" phase.
 
 
-
+
 void
-ProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure procName,
+ProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure procName,
+byte[] info,
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
-Notify members that all members have acquired their parts 
of the barrier and that they can
- now execute under the global barrier.
+Notify the members to acquire barrier for the 
procedure
 
 
-
+
 void
 ZKProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure proc,
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> nodeNames) 
 
+
+void
+ProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure procName,
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
+Notify members that all members have acquired their parts 
of the barrier and that they can
+ now execute under the global barrier.
+
+
 
 (package private) boolean
 ProcedureCoordinator.submitProcedure(Procedure proc)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/procedure/class-use/ProcedureCoordinator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/ProcedureCoordinator.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/ProcedureCoordinator.html
index b28ab7b..9e0f2e4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/ProcedureCoordinator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/ProcedureCoordinator.html
@@ -104,11 +104,11 @@
 
 
 private ProcedureCoordinator
-SnapshotManager.coordinator 
+EnabledTableSnapshotHandler.coordinator 
 
 
 private ProcedureCoordinator
-EnabledTableSnapshotHandler.coordinator 
+SnapshotManager.coordinator 
 
 
 
@@ -172,14 +172,14 @@
 
 
 boolean
-ProcedureCoordinatorRpcs.start(ProcedureCoordinator listener)
-Initial

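The ProcedureCoordinatorRpcs methods above describe a two-phase flow: members acquire their part of the barrier, then the coordinator announces that the global barrier is reached and members execute under it. The sketch below is only a plain-Java analogy of that ordering using latches; the real coordinator drives the same steps over ZooKeeper RPCs, not in-process threads.

    import java.util.concurrent.CountDownLatch;

    public class BarrierFlowSketch {
      public static void main(String[] args) throws InterruptedException {
        final int members = 3;
        final CountDownLatch acquired = new CountDownLatch(members);  // "acquire" phase
        final CountDownLatch reached = new CountDownLatch(1);         // "global barrier reached"

        for (int i = 0; i < members; i++) {
          final int id = i;
          new Thread(() -> {
            try {
              System.out.println("member " + id + " acquired its part of the barrier");
              acquired.countDown();   // member reports the acquire step
              reached.await();        // wait for the coordinator's "reached" signal
              System.out.println("member " + id + " executes under the global barrier");
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
            }
          }).start();
        }

        acquired.await();     // coordinator: every member has acquired
        reached.countDown();  // coordinator: analogue of sendGlobalBarrierReached
      }
    }
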
[13/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index 335d5c4..a24194b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -205,30 +205,30 @@
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
+DataBlockEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index d8e55a5..6841665 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -109,35 +109,35 @@
 
 
 protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-DiffKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-PrefixKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected abstract http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected abstract http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected http://docs.oracle.co

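The seekers above (FastDiffDeltaEncoder, PrefixKeyDeltaEncoder, DiffKeyDeltaEncoder, CopyKeyDataBlockEncoder) are selected by the data block encoding configured on a column family. A hedged sketch of choosing FAST_DIFF at table-creation time; the table and family names are made up.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

    public class EncodingChoiceSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("encoded_demo"));
          HColumnDescriptor family = new HColumnDescriptor("f");
          // FAST_DIFF picks the fast-diff delta encoder for this family's HFile blocks;
          // the seeker created from it is what locates KeyValues inside an encoded block.
          family.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
          htd.addFamily(family);
          admin.createTable(htd);
        }
      }
    }
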
[10/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index 57098aa..ed16af9 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -114,7 +114,7 @@
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster 
has one active
@@ -359,42 +359,38 @@ implements regionServerTracker 
 
 
-private ReplicationZKLockCleanerChore
-replicationZKLockCleanerChore 
-
-
 (package private) MemoryBoundedLogMessageBuffer
 rsFatals 
 
-
+
 private MasterProcedureScheduler.ProcedureEvent
 serverCrashProcessingEnabled 
 
-
+
 private ServerManager
 serverManager 
 
-
+
 (package private) boolean
 serviceStarted 
 
-
+
 (package private) SnapshotManager
 snapshotManager 
 
-
+
 private SplitOrMergeTracker
 splitOrMergeTracker 
 
-
+
 private long
 splitPlanCount 
 
-
+
 private TableStateManager
 tableStateManager 
 
-
+
 private MasterWalManager
 walManager 
 
@@ -1214,7 +1210,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1223,7 +1219,7 @@ implements 
 
 MASTER
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MASTER
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MASTER
 See Also:Constant
 Field Values
 
 
@@ -1233,7 +1229,7 @@ implements 
 
 activeMasterManager
-private final ActiveMasterManager activeMasterManager
+private final ActiveMasterManager activeMasterManager
 
 
 
@@ -1242,7 +1238,7 @@ implements 
 
 regionServerTracker
-RegionServerTracker regionServerTracker
+RegionServerTracker regionServerTracker
 
 
 
@@ -1251,7 +1247,7 @@ implements 
 
 drainingServerTracker
-private DrainingServerTracker 
drainingServerTracker
+private DrainingServerTracker 
drainingServerTracker
 
 
 
@@ -1260,7 +1256,7 @@ implements 
 
 loadBalancerTracker
-LoadBalancerTracker loadBalancerTracker
+LoadBalancerTracker loadBalancerTracker
 
 
 
@@ -1269,7 +1265,7 @@ implements 
 
 splitOrMergeTracker
-private SplitOrMergeTracker splitOrMergeTracker
+private SplitOrMergeTracker splitOrMergeTracker
 
 
 
@@ -1278,7 +1274,7 @@ implements 
 
 regionNormalizerTracker
-private RegionNormalizerTracker 
regionNormalizerTracker
+private RegionNormalizerTracker 
regionNormalizerTracker
 
 
 
@@ -1287,7 +1283,7 @@ implements 
 
 clusterSchemaService
-private ClusterSchemaService clusterSchemaService
+private ClusterSchemaService clusterSchemaService
 
 
 
@@ -1296,7 +1292,7 @@ implements 
 
 metricsMaster
-final MetricsMaster metricsMaster
+final MetricsMaster metricsMaster
 
 
 
@@ -1305,7 +1301,7 @@ implements 
 
 fileSystemManager
-private MasterFileSystem fileSystemManager
+private MasterFileSystem fileSystemManager
 
 
 
@@ -1314,7 +1310,7 @@ implements 
 
 walManager
-private MasterWalManager walManager
+private MasterWalManager walManager
 
 
 
@@ -1323,7 +1319,7 @@ implements 
 
 serverManager
-private volatile ServerManager serverManager
+private volatile ServerManager serverManager
 
 
 
@@ -1332,7 +1328,7 @@ implements 
 
 assignmentManager
-private AssignmentManager assignmentManager
+private AssignmentManager assignmentManager
 
 
 
@@ -1341,7 +1337,7 @@ implements 
 
 rsFatals
-MemoryBoundedLogMessageBuffer rsFatals
+MemoryBoundedLogMessageBuffer rsFatals
 
 
 
@@ -1350,7 +1346,7 @@ implements 
 
 isActiveMaster
-private volatile boolean isActiveMaster
+private volatile boolean isActiveMaster
 
 
 
@@ -1359,7 +1355,7 @@ implements 
 
 initialized
-private final MasterProcedureScheduler.ProcedureEvent
 initialized
+private final MasterProcedureScheduler.ProcedureEvent
 initialized
 
 
 
@@ -1368,7 +1364,7 @@ implements 
 
 serviceStarted
-volatile boolean serviceStarted
+volatile boolean serviceStarted
 
 
 
@@ -1377,7 +1373,7 @@ implements 
 
 serverCrashProcessingEnabled
-private final MasterProcedureScheduler.ProcedureEvent
 serverCrashProcessingEnabled
+private final MasterProcedureScheduler.ProcedureEvent
 serverCrashProcessingEnabled
 
 
 
@@ -1386,7 +1382,7 @@ implements 
 
 balancer
-private LoadBalancer balancer
+private LoadBalancer balancer
 
 
 
@@ -1395,7 +1391,7 @@ implements 
 
 normalizer
-private RegionNormalizer normalizer
+private RegionNormalizer normalizer
 
 
 
@@ -1404,7 +1400,7 @@ implements 
 
 balancerChore
-private BalancerChore balancerChore
+private BalancerChore balancerChore
 
 
 
@@ -1413,7 +1409,7 @@ implements 
 
 normalizerCho

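The HMaster class comment above is cut off mid-sentence ("An HBase cluster has one active..."); the point is that a cluster runs one active master while any others stand by. A client-side sketch that asks the cluster which master is active; it assumes a reachable cluster under the default configuration.

    import org.apache.hadoop.hbase.ClusterStatus;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class ActiveMasterSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          ClusterStatus status = admin.getClusterStatus();
          ServerName active = status.getMaster();   // the single active HMaster
          System.out.println("Active master: " + active);
          for (ServerName backup : status.getBackupMasters()) {
            System.out.println("Backup master: " + backup);
          }
        }
      }
    }
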
[18/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index 6b89aff..8b24449 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,118 +185,118 @@
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
+ MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumnHandler(Ob

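The ObserverContext use page above lists the same postAddColumn/postAddColumnFamily deprecations; these hooks fire when an administrator adds a column family to an existing table. A short sketch of the Admin call that triggers postAddColumnFamily on registered observers; addColumn is the Admin-level method of this vintage (later releases favour addColumnFamily), and the table/family names are assumptions.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class AddFamilySketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Adding a family is the admin operation whose completion fires
          // postAddColumnFamily on any registered master observers.
          admin.addColumn(TableName.valueOf("demo"), new HColumnDescriptor("new_family"));
        }
      }
    }
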
[05/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index 5d385bf..dd93329 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -96,11 +96,11 @@
 
 
 TableProcedureInterface.TableOperationType
-CreateNamespaceProcedure.getTableOperationType() 
+DeleteColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-EnableTableProcedure.getTableOperationType() 
+CreateNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
@@ -108,53 +108,53 @@
 
 
 TableProcedureInterface.TableOperationType
-DeleteColumnFamilyProcedure.getTableOperationType() 
+CloneSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateTableProcedure.getTableOperationType() 
+TableProcedureInterface.getTableOperationType()
+Given an operation type we can take decisions about what to 
do with pending operations.
+
 
 
 TableProcedureInterface.TableOperationType
-ModifyTableProcedure.getTableOperationType() 
+DeleteTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DisableTableProcedure.getTableOperationType() 
+CreateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-AddColumnFamilyProcedure.getTableOperationType() 
+EnableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CloneSnapshotProcedure.getTableOperationType() 
+TruncateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteTableProcedure.getTableOperationType() 
+ModifyColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyNamespaceProcedure.getTableOperationType() 
+RestoreSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyColumnFamilyProcedure.getTableOperationType() 
+ModifyTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TableProcedureInterface.getTableOperationType()
-Given an operation type we can take decisions about what to 
do with pending operations.
-
+ModifyNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TruncateTableProcedure.getTableOperationType() 
+DisableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-RestoreSnapshotProcedure.getTableOperationType() 
+AddColumnFamilyProcedure.getTableOperationType() 
 
 
 static TableProcedureInterface.TableOperationType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 2dae7b6..7767a11 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -152,9 +152,9 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
 org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType
+org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
index 8628091..d0ec8d7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/snap

[14/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index c8dd598..7d65a2f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -179,9 +179,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -191,11 +193,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 
@@ -214,15 +214,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
-  org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
-
-
-void
 GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
@@ -230,13 +221,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Extract the grouping columns from value to construct a new 
key.
 
 
-
+
 void
 RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
   Result values,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter) 
 
+
+void
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
+  org.apache.hadoop.mapred.OutputCollector output,
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
+
 
 boolean
 TableRecordReader.next(ImmutableBytesWritable key,
@@ -277,15 +277,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
-  org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
-
-
-void
 GroupingTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
@@ -293,13 +284,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Extract the grouping columns from value to construct a new 
key.
 
 
-
+
 void
 RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
   Result values,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter) 
 
+
+void
+IdentityTableMap.map(ImmutableBytesWritable key,
+  Result value,
+  org.apache.hadoop.mapred.OutputCollector output,
+  org.apache.hadoop.mapred.Reporter reporter)
+Pass the key, value to reduce
+
+
 
 void
 IdentityTableReduce.reduce(ImmutableBytesWritable key,
@@ -345,11 +345,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key 
+TableRecordReaderImpl.key 
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key 
+MultithreadedTableMapper.SubMapRecordReader.key 
 
 
 private ImmutableBytesWritable
@@ -423,13 +423,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableRecordReader.getCurrentKey()
-Returns the current key.
-
+TableSnapshotInputFormatImpl.RecordReader.getCurrentKey() 
 
 
 ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.getCurre

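The mapred-package mappers above (IdentityTableMap, GroupingTableMap, RowCounter.RowCounterMapper) all receive an ImmutableBytesWritable row key and a Result per row. A sketch of the same pattern with the newer mapreduce-package TableMapper, counting rows with a job counter; the table name "demo" is an assumption.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.hbase.mapreduce.TableMapper;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

    public class RowCountSketch {
      // The mapper is handed one (row key, Result) pair per row,
      // just like the mapred-package mappers listed in the diff.
      static class CountMapper extends TableMapper<NullWritable, NullWritable> {
        @Override
        protected void map(ImmutableBytesWritable row, Result value, Context context)
            throws IOException, InterruptedException {
          context.getCounter("sketch", "rows").increment(1);
        }
      }

      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "row-count-sketch");
        job.setJarByClass(RowCountSketch.class);
        TableMapReduceUtil.initTableMapperJob("demo", new Scan(), CountMapper.class,
            NullWritable.class, NullWritable.class, job);
        job.setNumReduceTasks(0);                       // map-only: the count lives in the counter
        job.setOutputFormatClass(NullOutputFormat.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }
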
[15/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 1eef354..435b828 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
[Generated Javadoc diff for the Filter class-use page: the rows for Query.getFilter/Scan.getFilter, the Scan/Get/Query setFilter methods, the filter fields of FilterWrapper, WhileMatchFilter, SkipFilter and FilterList, and the static createFilterFromArguments(ArrayList) entries of the built-in filters (ColumnPaginationFilter, DependentColumnFilter, PrefixFilter, FamilyFilter, InclusiveStopFilter, FirstKeyOnlyFilter, KeyOnlyFilter, TimestampsFilter, MultipleColumnPrefixFilter and others) are reordered; the listed signatures are unchanged. A usage sketch of this API surface follows.]
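For readers skimming the reordered rows above, a minimal, hedged sketch of the API surface they describe; the row prefix and qualifier below are illustrative, not taken from the diff:

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterUsageSketch {
  public static void main(String[] args) {
    // Server-side row filtering on a Scan.
    Filter prefix = new PrefixFilter(Bytes.toBytes("user-123"));
    Scan scan = new Scan();
    scan.setFilter(prefix);

    // The same setFilter entry point exists on Get (and on the Query base class).
    Get get = new Get(Bytes.toBytes("user-123#profile"));
    get.setFilter(new KeyOnlyFilter());

    System.out.println(scan.getFilter() + " / " + get.getFilter());
  }
}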

[03/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
index 5cea461..097e1f8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactingMemStore.html
[Generated Javadoc diff for CompactingMemStore: the method summary drops setInMemoryFlushInProgress(boolean) and renumbers the remaining rows (shouldFlushInMemory, size, snapshot, stopCompaction, swapCompactedSegments, updateLowestUnflushedSequenceIdInWAL), and the method detail anchors for the constructor, initInmemoryFlushSize, getSegmentSize, getSegmentsSize, size, finalizeFlush, isSloppy, snapshot, getFlushableSize, updateLowestUnflushedSequenceIdInWAL and getSegments are regenerated with identical signatures and descriptions.]

[16/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index d078cfc..1f571f8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
[Generated Javadoc diff for the DeserializationException class-use page: the parseFrom(byte[]) rows for HTableDescriptor, HRegionInfo, HColumnDescriptor, ClusterId and SplitLogTask, and the parseFrom(byte[] pbBytes) rows for ByteArrayComparable, Filter and the filter/comparator implementations (DependentColumnFilter, PrefixFilter, FamilyFilter, RegexStringComparator, WhileMatchFilter, FirstKeyOnlyFilter, TimestampsFilter, KeyOnlyFilter, SubstringComparator, SkipFilter, FilterList, ColumnRangeFilter and others) are reordered; the listed signatures are unchanged. A round-trip sketch follows.]
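A hedged sketch of the round trip behind these parseFrom entries, assuming a descriptor serialized with toByteArray(); the table name is illustrative and error handling is reduced to the declared exceptions:

import java.io.IOException;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;

public class DescriptorRoundTrip {
  public static void main(String[] args) throws IOException, DeserializationException {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo"));
    // Serialize to the protobuf-backed byte form.
    byte[] pb = htd.toByteArray();
    // parseFrom signals malformed input via DeserializationException.
    HTableDescriptor copy = HTableDescriptor.parseFrom(pb);
    System.out.println(copy.getTableName());
  }
}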

[08/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
index c350b87..50e469a 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
[Generated Javadoc diff for the HMaster class-use page: the private HMaster master fields (MetricsMasterWrapperImpl, MasterRpcServices, ExpiredMobFileCleanerChore, MasterMobCompactionThread, HMaster.InitializationMonitor, ClusterStatusPublisher, MobCompactionChore), the m_master fields and getMaster/setMaster accessors of the Jamon template ImplData classes, and the makeRenderer/render/renderNoFlush rows of MasterStatusTmpl, RegionServerListTmpl and BackupMasterStatusTmpl are reordered; the listed signatures are unchanged.]

[12/52] [partial] hbase-site git commit: Published site at c137bafe51a9212f0c2bb2aa4cde4614b4e537be.

2016-07-07 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/16d2a5b1/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 5de8029..62df01b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
[Generated Javadoc diff for the Cacheable class-use page: the getBlock, cacheBlock and returnBlock rows of LruBlockCache, CombinedBlockCache, InclusiveCombinedBlockCache, MemcachedBlockCache and the BlockCache interface, plus the read(long, int, CacheableDeserializer) rows of FileMmapEngine and ByteBufferIOEngine, are reordered; the listed signatures are unchanged. A read-through sketch against this interface follows.]
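A hedged read-through sketch written against the getBlock/cacheBlock signatures listed above; BlockCacheHelper and BlockLoader are hypothetical names introduced here for illustration, not HBase classes:

import java.io.IOException;

import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.Cacheable;

final class BlockCacheHelper {

  /** Callback that knows how to read the block from the underlying file; illustrative only. */
  interface BlockLoader {
    Cacheable load() throws IOException;
  }

  private BlockCacheHelper() {
  }

  static Cacheable getOrLoad(BlockCache cache, String hfileName, long offset, BlockLoader loader)
      throws IOException {
    BlockCacheKey key = new BlockCacheKey(hfileName, offset);
    // caching=true, repeat=false, updateCacheMetrics=true
    Cacheable block = cache.getBlock(key, true, false, true);
    if (block == null) {
      block = loader.load();
      // The two-argument overload adds the block with the default (not in-memory) settings.
      cache.cacheBlock(key, block);
    }
    return block;
  }
}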

hbase git commit: Revert "HBASE-16055 PutSortReducer loses any Visibility/acl attribute set on the"

2016-07-07 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 ec506bb01 -> 60ff6128e


Revert "HBASE-16055 PutSortReducer loses any Visibility/acl attribute set on 
the"

This reverts commit ec506bb01caf65708353665106faab7af1019d73.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/60ff6128
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/60ff6128
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/60ff6128

Branch: refs/heads/0.98
Commit: 60ff6128ef297a379f75dfdca5fcac133eab9e55
Parents: ec506bb
Author: Andrew Purtell 
Authored: Thu Jul 7 10:52:25 2016 -0700
Committer: Andrew Purtell 
Committed: Thu Jul 7 10:52:25 2016 -0700

--
 .../hadoop/hbase/mapreduce/PutSortReducer.java  |  20 +--
 .../hadoop/hbase/mapreduce/TextSortReducer.java |   8 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.java | 127 ---
 3 files changed, 29 insertions(+), 126 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/60ff6128/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
index ebaebcc..792686a 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
@@ -18,25 +18,17 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
-import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.TreeSet;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.security.visibility.CellVisibility;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.util.StringUtils;
 
@@ -52,17 +44,7 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceStability.Stable
 public class PutSortReducer extends
 Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, KeyValue> {
-  // the cell creator
-  private CellCreator kvCreator;
-
-  @Override
-  protected void
-  setup(Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, KeyValue>.Context context)
-  throws IOException, InterruptedException {
-Configuration conf = context.getConfiguration();
-this.kvCreator = new CellCreator(conf);
-  }
-
+  
   @Override
   protected void reduce(
   ImmutableBytesWritable row,

http://git-wip-us.apache.org/repos/asf/hbase/blob/60ff6128/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
index cb329f8..168ba40 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
@@ -99,8 +99,9 @@ public class TextSortReducer extends
*/
   @Override
   protected void setup(Context context) {
+doSetup(context);
+
 Configuration conf = context.getConfiguration();
-doSetup(context, conf);
 
 parser = new ImportTsv.TsvParser(conf.get(ImportTsv.COLUMNS_CONF_KEY), 
separator);
 if (parser.getRowKeyColumnIndex() == -1) {
@@ -112,9 +113,10 @@ public class TextSortReducer extends
   /**
* Handles common parameter initialization that a subclass might want to 
leverage.
* @param context
-   * @param conf
*/
-  protected void doSetup(Context context, Configuration conf) {
+  protected void doSetup(Context context) {
+Configuration conf = context.getConfiguration();
+
 // If a custom separator has been used,
 // decode it back from Base64 encoding.
 separator = conf.get(ImportTsv.SEPARATOR_CONF_KEY);

http://git-wip-us.apache.org/repos/asf/hbase/blob/60ff6128/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
 
b/hbase-server/src/test/
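For context, a hedged sketch (not part of this revert) of the user-facing path the HBASE-16055 subject refers to: a bulk-load mapper that attaches a visibility expression to its Puts before they reach PutSortReducer. The class name, column names and label expression are illustrative; with the revert applied, the reducer is back to losing such attributes when it turns Puts into KeyValues.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper feeding PutSortReducer/HFileOutputFormat2 with visibility-labelled Puts.
public class LabelledPutMapper
    extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

  @Override
  protected void map(LongWritable key, Text line, Context context)
      throws IOException, InterruptedException {
    String[] fields = line.toString().split(",");
    Put put = new Put(Bytes.toBytes(fields[0]));
    put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes(fields[1]));
    // Attribute set on the Put; with this revert in place, PutSortReducer does not
    // carry it through as visibility tags on the emitted KeyValues.
    put.setCellVisibility(new CellVisibility("secret|admin"));
    context.write(new ImmutableBytesWritable(put.getRow()), put);
  }
}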

hbase git commit: HBASE-16132 Scan does not return all the result when regionserver is busy (binlijin)

2016-07-07 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 78ef0513b -> 8f8736d17


HBASE-16132 Scan does not return all the result when regionserver is busy 
(binlijin)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8f8736d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8f8736d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8f8736d1

Branch: refs/heads/branch-1.2
Commit: 8f8736d177e6bf6ac0c45672121827abc1f09d73
Parents: 78ef051
Author: Yu Li 
Authored: Mon Jul 4 21:43:29 2016 +0800
Committer: Yu Li 
Committed: Thu Jul 7 22:42:18 2016 +0800

--
 .../hadoop/hbase/client/ScannerCallableWithReplicas.java   | 6 +-
 .../org/apache/hadoop/hbase/master/HMasterCommandLine.java | 3 +--
 2 files changed, 6 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8f8736d1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
index f8feca1..4d5bb0f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
@@ -202,6 +202,9 @@ class ScannerCallableWithReplicas implements 
RetryingCallable<Result[]> {
   updateCurrentlyServingReplica(r.getSecond(), r.getFirst(), done, 
pool);
 }
 return r == null ? null : r.getFirst(); // great we got an answer
+  } else {
+throw new IOException("Failed to get result within timeout, timeout="
++ timeout + "ms");
   }
 } catch (ExecutionException e) {
   RpcRetryingCallerWithReadReplicas.throwEnrichedException(e, retries);
@@ -216,7 +219,8 @@ class ScannerCallableWithReplicas implements 
RetryingCallable<Result[]> {
   // calls succeeded or failed. In all case, we stop all our tasks.
   cs.cancelAll();
 }
-return null; // unreachable
+LOG.error("Imposible? Arrive at an unreachable line..."); // unreachable
+throw new IOException("Imposible? Arrive at an unreachable line...");
   }
 
   private void updateCurrentlyServingReplica(ScannerCallable scanner, Result[] 
result,

http://git-wip-us.apache.org/repos/asf/hbase/blob/8f8736d1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 706dc23..d2957c7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -101,8 +101,7 @@ public class HMasterCommandLine extends ServerCommandLine {
 // minRegionServers used to be minServers.  Support it too.
 if (cmd.hasOption("minServers")) {
   String val = cmd.getOptionValue("minServers");
-  getConf().setInt("hbase.regions.server.count.min",
-  Integer.parseInt(val));
+  getConf().setInt("hbase.regions.server.count.min", 
Integer.parseInt(val));
   LOG.debug("minServers set to " + val);
 }
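A hedged client-side sketch of the failure mode this change surfaces: instead of a silently incomplete scan, the caller now sees an IOException (type and message taken from the diff above) and can decide to retry or fail loudly. The table name is illustrative and no particular retry policy is implied.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class ScanTimeoutDemo {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("demo"));
         ResultScanner scanner = table.getScanner(new Scan())) {
      Result r;
      while ((r = scanner.next()) != null) {
        // Process the row; with this patch a replica timeout no longer
        // truncates the scan silently, it propagates as an IOException.
      }
    } catch (IOException e) {
      // e.g. "Failed to get result within timeout, timeout=...ms"
      // Decide here whether to retry the scan or fail the job.
      throw e;
    }
  }
}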
 



hbase git commit: HBASE-16132 Scan does not return all the result when regionserver is busy (binlijin)

2016-07-07 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 11c59d9dd -> dcc1243f6


HBASE-16132 Scan does not return all the result when regionserver is busy 
(binlijin)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dcc1243f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dcc1243f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dcc1243f

Branch: refs/heads/branch-1.1
Commit: dcc1243f61332b696e1df193d553249b469c0e63
Parents: 11c59d9
Author: Yu Li 
Authored: Mon Jul 4 21:43:29 2016 +0800
Committer: Yu Li 
Committed: Thu Jul 7 22:43:37 2016 +0800

--
 .../hadoop/hbase/client/ScannerCallableWithReplicas.java   | 6 +-
 .../org/apache/hadoop/hbase/master/HMasterCommandLine.java | 3 +--
 2 files changed, 6 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dcc1243f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
index 5675aa0..c02b438 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java
@@ -202,6 +202,9 @@ class ScannerCallableWithReplicas implements 
RetryingCallable<Result[]> {
   updateCurrentlyServingReplica(r.getSecond(), r.getFirst(), done, 
pool);
 }
 return r == null ? null : r.getFirst(); // great we got an answer
+  } else {
+throw new IOException("Failed to get result within timeout, timeout="
++ timeout + "ms");
   }
 } catch (ExecutionException e) {
   RpcRetryingCallerWithReadReplicas.throwEnrichedException(e, retries);
@@ -216,7 +219,8 @@ class ScannerCallableWithReplicas implements 
RetryingCallable<Result[]> {
   // calls succeeded or failed. In all case, we stop all our tasks.
   cs.cancelAll();
 }
-return null; // unreachable
+LOG.error("Imposible? Arrive at an unreachable line..."); // unreachable
+throw new IOException("Imposible? Arrive at an unreachable line...");
   }
 
   private void updateCurrentlyServingReplica(ScannerCallable scanner, Result[] 
result,

http://git-wip-us.apache.org/repos/asf/hbase/blob/dcc1243f/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index beea81b..4d750c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -102,8 +102,7 @@ public class HMasterCommandLine extends ServerCommandLine {
 // minRegionServers used to be minServers.  Support it too.
 if (cmd.hasOption("minServers")) {
   String val = cmd.getOptionValue("minServers");
-  getConf().setInt("hbase.regions.server.count.min",
-  Integer.valueOf(val));
+  getConf().setInt("hbase.regions.server.count.min", 
Integer.parseInt(val));
   LOG.debug("minServers set to " + val);
 }